[ 606.061160] env[62510]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=62510) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 606.061504] env[62510]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=62510) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 606.061634] env[62510]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=62510) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 606.061966] env[62510]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 606.154294] env[62510]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=62510) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:383}}
[ 606.164936] env[62510]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.011s {{(pid=62510) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:421}}
[ 606.764153] env[62510]: INFO nova.virt.driver [None req-218ea883-a6c5-4c2e-83c3-783a149978bd None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 606.836362] env[62510]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 606.836529] env[62510]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 606.836604] env[62510]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=62510) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 609.958414] env[62510]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-4a3a509e-78a4-4917-8ee4-1263489e819d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 609.974572] env[62510]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=62510) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 609.974742] env[62510]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-1c40e41e-5e60-480d-b5f2-0528c218c08e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 610.015060] env[62510]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 4a7c3.
[ 610.015192] env[62510]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.179s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 610.015809] env[62510]: INFO nova.virt.vmwareapi.driver [None req-218ea883-a6c5-4c2e-83c3-783a149978bd None None] VMware vCenter version: 7.0.3
[ 610.019371] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0730e20b-68a7-4da7-a6c6-2234f7e997d5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 610.037072] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32bdd51c-ffad-464f-96c3-dc516020a4b5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 610.043108] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96da131e-3266-4f4a-b19f-78693b8d65ee {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 610.049815] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2d1e7a5-e025-4d60-8fd8-f1bdab04ca36 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 610.063080] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a82d50b4-b6c4-4d47-bbe2-0a63beefe7a0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 610.069277] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d3d08cb-2b7a-4a60-b655-ed65c9fdc8d5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 610.100487] env[62510]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-ecf7ba8a-1c3d-49b9-9b62-f2d13c679bd0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 610.106401] env[62510]: DEBUG nova.virt.vmwareapi.driver [None req-218ea883-a6c5-4c2e-83c3-783a149978bd None None] Extension org.openstack.compute already exists. {{(pid=62510) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:228}}
[ 610.109148] env[62510]: INFO nova.compute.provider_config [None req-218ea883-a6c5-4c2e-83c3-783a149978bd None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 610.612788] env[62510]: DEBUG nova.context [None req-218ea883-a6c5-4c2e-83c3-783a149978bd None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),30397125-4bea-420c-9507-e3aa48e41c67(cell1) {{(pid=62510) load_cells /opt/stack/nova/nova/context.py:464}}
[ 610.614948] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 610.615275] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 610.616051] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 610.616497] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] Acquiring lock "30397125-4bea-420c-9507-e3aa48e41c67" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 610.616689] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] Lock "30397125-4bea-420c-9507-e3aa48e41c67" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 610.617768] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] Lock "30397125-4bea-420c-9507-e3aa48e41c67" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 610.638932] env[62510]: INFO dbcounter [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] Registered counter for database nova_cell0
[ 610.647008] env[62510]: INFO dbcounter [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] Registered counter for database nova_cell1
[ 610.650496] env[62510]: DEBUG oslo_db.sqlalchemy.engines [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62510) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 610.650877] env[62510]: DEBUG oslo_db.sqlalchemy.engines [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62510) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 610.655836] env[62510]: ERROR nova.db.main.api [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 610.655836] env[62510]: result = function(*args, **kwargs)
[ 610.655836] env[62510]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 610.655836] env[62510]: return func(*args, **kwargs)
[ 610.655836] env[62510]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 610.655836] env[62510]: result = fn(*args, **kwargs)
[ 610.655836] env[62510]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 610.655836] env[62510]: return f(*args, **kwargs)
[ 610.655836] env[62510]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version
[ 610.655836] env[62510]: return db.service_get_minimum_version(context, binaries)
[ 610.655836] env[62510]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 610.655836] env[62510]: _check_db_access()
[ 610.655836] env[62510]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 610.655836] env[62510]: stacktrace = ''.join(traceback.format_stack())
[ 610.655836] env[62510]:
[ 610.656658] env[62510]: ERROR nova.db.main.api [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 610.656658] env[62510]: result = function(*args, **kwargs)
[ 610.656658] env[62510]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 610.656658] env[62510]: return func(*args, **kwargs)
[ 610.656658] env[62510]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 610.656658] env[62510]: result = fn(*args, **kwargs)
[ 610.656658] env[62510]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 610.656658] env[62510]: return f(*args, **kwargs)
[ 610.656658] env[62510]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version
[ 610.656658] env[62510]: return db.service_get_minimum_version(context, binaries)
[ 610.656658] env[62510]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 610.656658] env[62510]: _check_db_access()
[ 610.656658] env[62510]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 610.656658] env[62510]: stacktrace = ''.join(traceback.format_stack())
[ 610.656658] env[62510]:
[ 610.657079] env[62510]: WARNING nova.objects.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] Failed to get minimum service version for cell 30397125-4bea-420c-9507-e3aa48e41c67
[ 610.657189] env[62510]: WARNING nova.objects.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 610.657611] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] Acquiring lock "singleton_lock" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 610.657770] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] Acquired lock "singleton_lock" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [
610.658010] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] Releasing lock "singleton_lock" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 610.658339] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] Full set of CONF: {{(pid=62510) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 610.658484] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] ******************************************************************************** {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}} [ 610.658614] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] Configuration options gathered from: {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}} [ 610.658750] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 610.658953] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 610.659113] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] ================================================================================ {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 610.659412] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] allow_resize_to_same_host = True {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.659542] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] arq_binding_timeout = 300 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.659635] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] backdoor_port = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.659762] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] backdoor_socket = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.659925] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] block_device_allocate_retries = 60 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.660110] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] block_device_allocate_retries_interval = 3 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.660285] env[62510]: DEBUG 
oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cert = self.pem {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.660449] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.660617] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] compute_monitors = [] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.660785] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] config_dir = [] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.660956] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] config_drive_format = iso9660 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.661115] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.661289] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] config_source = [] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.661515] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] console_host = devstack {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.661703] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] control_exchange = nova {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.661863] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cpu_allocation_ratio = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.662064] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] daemon = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.662258] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] debug = True {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.662424] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] default_access_ip_network_name = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.662593] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] default_availability_zone = nova {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.662754] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] default_ephemeral_format = 
None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.662917] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] default_green_pool_size = 1000 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.663178] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.663349] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] default_schedule_zone = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.663509] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] disk_allocation_ratio = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.663673] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] enable_new_services = True {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.663853] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] enabled_apis = ['osapi_compute'] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.664041] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] enabled_ssl_apis = [] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.664208] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] flat_injected = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.664368] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] force_config_drive = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.664532] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] force_raw_images = True {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.664701] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] graceful_shutdown_timeout = 5 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.664892] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] heal_instance_info_cache_interval = 60 {{(pid=62510) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.665165] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] host = cpu-1 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.665379] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.665556] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] initial_disk_allocation_ratio = 1.0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.665723] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] initial_ram_allocation_ratio = 1.0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.665984] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.666136] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] instance_build_timeout = 0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.666302] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] instance_delete_interval = 300 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.666472] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] instance_format = [instance: %(uuid)s] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.666640] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] instance_name_template = instance-%08x {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.666803] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] instance_usage_audit = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.666984] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] instance_usage_audit_period = month {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.667162] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.667337] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] instances_path = /opt/stack/data/nova/instances {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.667505] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] internal_service_availability_zone = internal {{(pid=62510) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.667658] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] key = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.667821] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] live_migration_retry_count = 30 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.667993] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] log_color = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.668207] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] log_config_append = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.668383] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.668544] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] log_dir = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.668704] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] log_file = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.668836] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] log_options = True {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.669026] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] log_rotate_interval = 1 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.669189] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] log_rotate_interval_type = days {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.669359] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] log_rotation_type = none {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.669488] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.669620] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.669792] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] 
logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.669963] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.670104] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.670268] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] long_rpc_timeout = 1800 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.670427] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] max_concurrent_builds = 10 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.670586] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] max_concurrent_live_migrations = 1 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.670746] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] max_concurrent_snapshots = 5 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.670900] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] max_local_block_devices = 3 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.671085] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] max_logfile_count = 30 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.671265] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] max_logfile_size_mb = 200 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.671429] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] maximum_instance_delete_attempts = 5 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.671595] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] metadata_listen = 0.0.0.0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.671760] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] metadata_listen_port = 8775 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.671925] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] metadata_workers = 2 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.672101] env[62510]: DEBUG oslo_service.service 
[None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] migrate_max_retries = -1 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.672275] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] mkisofs_cmd = genisoimage {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.672478] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] my_block_storage_ip = 10.180.1.21 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.672609] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] my_ip = 10.180.1.21 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.672849] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] my_shared_fs_storage_ip = 10.180.1.21 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.672983] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] network_allocate_retries = 0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.673186] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.673360] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] osapi_compute_listen = 0.0.0.0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.673524] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] osapi_compute_listen_port = 8774 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.673690] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] osapi_compute_unique_server_name_scope = {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.673858] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] osapi_compute_workers = 2 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.674050] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] password_length = 12 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.674235] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] periodic_enable = True {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.674400] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] periodic_fuzzy_delay = 60 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.674576] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] pointer_model = usbtablet 
{{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.674743] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] preallocate_images = none {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.674934] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] publish_errors = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.675075] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] pybasedir = /opt/stack/nova {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.675239] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] ram_allocation_ratio = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.675402] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] rate_limit_burst = 0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.675572] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] rate_limit_except_level = CRITICAL {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.675734] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] rate_limit_interval = 0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.675898] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] reboot_timeout = 0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.676069] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] reclaim_instance_interval = 0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.676229] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] record = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.676397] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] reimage_timeout_per_gb = 60 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.676562] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] report_interval = 120 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.676719] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] rescue_timeout = 0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.676877] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] reserved_host_cpus = 0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.677063] env[62510]: DEBUG oslo_service.service [None 
req-2aed77c1-8466-4130-8371-2094225db0b2 None None] reserved_host_disk_mb = 0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.677250] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] reserved_host_memory_mb = 512 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.677409] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] reserved_huge_pages = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.677568] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] resize_confirm_window = 0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.677726] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] resize_fs_using_block_device = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.677885] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] resume_guests_state_on_host_boot = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.678066] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.678231] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] rpc_response_timeout = 60 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.678388] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] run_external_periodic_tasks = True {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.678556] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] running_deleted_instance_action = reap {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.678715] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] running_deleted_instance_poll_interval = 1800 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.678876] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] running_deleted_instance_timeout = 0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.679042] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] scheduler_instance_sync_interval = 120 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.679213] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] service_down_time = 720 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.679380] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] 
servicegroup_driver = db {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.679537] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] shell_completion = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.679693] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] shelved_offload_time = 0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.679850] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] shelved_poll_interval = 3600 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.680030] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] shutdown_timeout = 0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.680222] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] source_is_ipv6 = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.680388] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] ssl_only = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.680638] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.680839] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] sync_power_state_interval = 600 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.680967] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] sync_power_state_pool_size = 1000 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.681156] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] syslog_log_facility = LOG_USER {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.681316] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] tempdir = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.681478] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] timeout_nbd = 10 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.681648] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] transport_url = **** {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.681809] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] update_resources_interval = 0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.681974] env[62510]: DEBUG 
oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] use_cow_images = True {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.682147] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] use_eventlog = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.682311] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] use_journal = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.682469] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] use_json = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.682626] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] use_rootwrap_daemon = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.682786] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] use_stderr = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.682946] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] use_syslog = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.683139] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vcpu_pin_set = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.683328] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vif_plugging_is_fatal = True {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.683498] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vif_plugging_timeout = 300 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.683665] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] virt_mkfs = [] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.683827] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] volume_usage_poll_interval = 0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.683992] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] watch_log_file = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.684177] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] web = /usr/share/spice-html5 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 610.684362] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62510) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.684529] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.684693] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.684885] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_concurrency.disable_process_locking = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.685495] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.685693] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.685871] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.686093] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.686280] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.686452] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.686636] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api.auth_strategy = keystone {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.686810] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api.compute_link_prefix = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.686999] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.687197] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api.dhcp_domain = novalocal {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
610.687371] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api.enable_instance_password = True {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.687538] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api.glance_link_prefix = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.687707] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.687882] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.688064] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api.instance_list_per_project_cells = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.688234] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api.list_records_by_skipping_down_cells = True {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.688399] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api.local_metadata_per_cell = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.688568] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api.max_limit = 1000 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.688735] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api.metadata_cache_expiration = 15 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.688910] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api.neutron_default_tenant_id = default {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.689134] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api.response_validation = warn {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.689314] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api.use_neutron_default_nets = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.689485] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.689650] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=62510) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.689820] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.689998] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.690186] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api.vendordata_dynamic_targets = [] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.690353] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api.vendordata_jsonfile_path = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.690535] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.690730] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cache.backend = dogpile.cache.memcached {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.690902] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cache.backend_argument = **** {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.691067] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cache.backend_expiration_time = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.691244] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cache.config_prefix = cache.oslo {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.691414] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cache.dead_timeout = 60.0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.691581] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cache.debug_cache_backend = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.691745] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cache.enable_retry_client = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.691907] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cache.enable_socket_keepalive = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.692153] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cache.enabled = True {{(pid=62510) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.692342] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cache.enforce_fips_mode = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.692511] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cache.expiration_time = 600 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.692674] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cache.hashclient_retry_attempts = 2 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.692843] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cache.hashclient_retry_delay = 1.0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.693023] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cache.memcache_dead_retry = 300 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.693193] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cache.memcache_password = **** {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.693359] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.693523] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.693685] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cache.memcache_pool_maxsize = 10 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.693846] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.694035] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cache.memcache_sasl_enabled = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.694230] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.694400] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cache.memcache_socket_timeout = 1.0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.694563] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cache.memcache_username = None {{(pid=62510) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.694728] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cache.proxies = [] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.694916] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cache.redis_db = 0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.695122] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cache.redis_password = **** {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.695308] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cache.redis_sentinel_service_name = mymaster {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.695486] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.695659] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cache.redis_server = localhost:6379 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.695826] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cache.redis_socket_timeout = 1.0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.695988] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cache.redis_username = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.696168] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cache.retry_attempts = 2 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.696336] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cache.retry_delay = 0.0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.696499] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cache.socket_keepalive_count = 1 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.696667] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cache.socket_keepalive_idle = 1 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.696826] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cache.socket_keepalive_interval = 1 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.696988] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cache.tls_allowed_ciphers = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.697163] 
env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cache.tls_cafile = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.697352] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cache.tls_certfile = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.697526] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cache.tls_enabled = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.697687] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cache.tls_keyfile = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.697860] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cinder.auth_section = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.698075] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cinder.auth_type = password {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.698253] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cinder.cafile = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.698434] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cinder.catalog_info = volumev3::publicURL {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.698598] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cinder.certfile = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.698762] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cinder.collect_timing = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.698926] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cinder.cross_az_attach = True {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.699105] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cinder.debug = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.699271] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cinder.endpoint_template = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.699438] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cinder.http_retries = 3 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.699651] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cinder.insecure = False {{(pid=62510) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.699821] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cinder.keyfile = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.699999] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cinder.os_region_name = RegionOne {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.700184] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cinder.split_loggers = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.700346] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cinder.timeout = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.700518] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.700682] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] compute.cpu_dedicated_set = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.700843] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] compute.cpu_shared_set = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.701074] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] compute.image_type_exclude_list = [] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.701228] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.701397] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] compute.max_concurrent_disk_ops = 0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.701571] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] compute.max_disk_devices_to_attach = -1 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.701772] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.701950] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.702133] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] compute.resource_provider_association_refresh = 
300 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.702298] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.702462] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] compute.shutdown_retry_interval = 10 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.702646] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.702840] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] conductor.workers = 2 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.703015] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] console.allowed_origins = [] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.703182] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] console.ssl_ciphers = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.703354] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] console.ssl_minimum_version = default {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.703524] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] consoleauth.enforce_session_timeout = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.703692] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] consoleauth.token_ttl = 600 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.703861] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cyborg.cafile = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.704056] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cyborg.certfile = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.704246] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cyborg.collect_timing = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.704409] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cyborg.connect_retries = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.704568] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cyborg.connect_retry_delay = None {{(pid=62510) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.704724] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cyborg.endpoint_override = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.704918] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cyborg.insecure = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.705102] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cyborg.keyfile = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.705266] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cyborg.max_version = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.705424] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cyborg.min_version = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.705583] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cyborg.region_name = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.705741] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cyborg.retriable_status_codes = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.705926] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cyborg.service_name = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.706121] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cyborg.service_type = accelerator {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.706292] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cyborg.split_loggers = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.706451] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cyborg.status_code_retries = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.706611] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cyborg.status_code_retry_delay = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.706767] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cyborg.timeout = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.706945] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.707146] env[62510]: 
DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] cyborg.version = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.707336] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] database.backend = sqlalchemy {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.707508] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] database.connection = **** {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.707674] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] database.connection_debug = 0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.707845] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] database.connection_parameters = {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.708020] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] database.connection_recycle_time = 3600 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.708187] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] database.connection_trace = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.708371] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] database.db_inc_retry_interval = True {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.708536] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] database.db_max_retries = 20 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.708699] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] database.db_max_retry_interval = 10 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.708863] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] database.db_retry_interval = 1 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.709037] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] database.max_overflow = 50 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.709202] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] database.max_pool_size = 5 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.709363] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] database.max_retries = 10 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.709534] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] 
database.mysql_sql_mode = TRADITIONAL {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.709694] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] database.mysql_wsrep_sync_wait = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.709850] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] database.pool_timeout = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.710063] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] database.retry_interval = 10 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.710270] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] database.slave_connection = **** {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.710443] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] database.sqlite_synchronous = True {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.710609] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] database.use_db_reconnect = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.710791] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api_database.backend = sqlalchemy {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.710965] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api_database.connection = **** {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.711184] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api_database.connection_debug = 0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.711322] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api_database.connection_parameters = {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.711488] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api_database.connection_recycle_time = 3600 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.711651] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api_database.connection_trace = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.711816] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api_database.db_inc_retry_interval = True {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.711983] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api_database.db_max_retries = 20 
{{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.712163] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api_database.db_max_retry_interval = 10 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.712325] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api_database.db_retry_interval = 1 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.712489] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api_database.max_overflow = 50 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.712652] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api_database.max_pool_size = 5 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.712818] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api_database.max_retries = 10 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.712994] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.713195] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.713361] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api_database.pool_timeout = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.713524] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api_database.retry_interval = 10 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.713730] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api_database.slave_connection = **** {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.713965] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] api_database.sqlite_synchronous = True {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.714173] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] devices.enabled_mdev_types = [] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.714359] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.714536] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] ephemeral_storage_encryption.default_format = luks 
{{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.714704] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] ephemeral_storage_encryption.enabled = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.714892] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.715090] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] glance.api_servers = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.715263] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] glance.cafile = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.715427] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] glance.certfile = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.715593] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] glance.collect_timing = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.715754] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] glance.connect_retries = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.715928] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] glance.connect_retry_delay = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.716127] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] glance.debug = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.716298] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] glance.default_trusted_certificate_ids = [] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.716462] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] glance.enable_certificate_validation = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.716626] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] glance.enable_rbd_download = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.716784] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] glance.endpoint_override = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.716950] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] glance.insecure = False {{(pid=62510) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.717131] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] glance.keyfile = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.717289] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] glance.max_version = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.717443] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] glance.min_version = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.717605] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] glance.num_retries = 3 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.717777] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] glance.rbd_ceph_conf = {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.717942] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] glance.rbd_connect_timeout = 5 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.718137] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] glance.rbd_pool = {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.718308] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] glance.rbd_user = {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.718470] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] glance.region_name = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.718632] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] glance.retriable_status_codes = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.718791] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] glance.service_name = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.718990] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] glance.service_type = image {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.719186] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] glance.split_loggers = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.719351] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] glance.status_code_retries = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.719513] env[62510]: DEBUG oslo_service.service [None 
req-2aed77c1-8466-4130-8371-2094225db0b2 None None] glance.status_code_retry_delay = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.719673] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] glance.timeout = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.719857] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.720038] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] glance.verify_glance_signatures = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.720245] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] glance.version = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.720485] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] guestfs.debug = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.720672] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] manila.auth_section = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.720842] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] manila.auth_type = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.721017] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] manila.cafile = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.721187] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] manila.certfile = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.721355] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] manila.collect_timing = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.721517] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] manila.connect_retries = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.721678] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] manila.connect_retry_delay = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.721842] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] manila.endpoint_override = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.722044] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] manila.insecure = False {{(pid=62510) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.722226] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] manila.keyfile = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.722388] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] manila.max_version = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.722547] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] manila.min_version = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.722704] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] manila.region_name = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.722863] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] manila.retriable_status_codes = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.723030] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] manila.service_name = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.723207] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] manila.service_type = shared-file-system {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.723371] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] manila.share_apply_policy_timeout = 10 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.723537] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] manila.split_loggers = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.723695] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] manila.status_code_retries = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.723853] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] manila.status_code_retry_delay = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.724019] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] manila.timeout = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.724203] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] manila.valid_interfaces = ['internal', 'public'] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.724362] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] manila.version = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.724528] 
env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] mks.enabled = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.724914] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.725142] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] image_cache.manager_interval = 2400 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.725325] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] image_cache.precache_concurrency = 1 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.725499] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] image_cache.remove_unused_base_images = True {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.725670] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.725844] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.726032] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] image_cache.subdirectory_name = _base {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.726217] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] ironic.api_max_retries = 60 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.726382] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] ironic.api_retry_interval = 2 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.726543] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] ironic.auth_section = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.726707] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] ironic.auth_type = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.726864] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] ironic.cafile = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.727034] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] ironic.certfile = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.727207] env[62510]: DEBUG 
oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] ironic.collect_timing = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.727376] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] ironic.conductor_group = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.727536] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] ironic.connect_retries = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.727698] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] ironic.connect_retry_delay = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.727857] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] ironic.endpoint_override = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.728051] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] ironic.insecure = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.728236] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] ironic.keyfile = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.728398] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] ironic.max_version = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.728559] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] ironic.min_version = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.728725] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] ironic.peer_list = [] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.728886] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] ironic.region_name = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.729060] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] ironic.retriable_status_codes = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.729234] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] ironic.serial_console_state_timeout = 10 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.729396] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] ironic.service_name = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.729565] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] ironic.service_type = baremetal 
{{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.729724] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] ironic.shard = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.729888] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] ironic.split_loggers = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.730057] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] ironic.status_code_retries = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.730218] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] ironic.status_code_retry_delay = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.730377] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] ironic.timeout = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.730557] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.730715] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] ironic.version = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.730901] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.731107] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] key_manager.fixed_key = **** {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.731310] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.731475] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] barbican.barbican_api_version = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.731636] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] barbican.barbican_endpoint = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.731809] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] barbican.barbican_endpoint_type = public {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.731972] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] barbican.barbican_region_name = None {{(pid=62510) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.732143] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] barbican.cafile = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.732302] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] barbican.certfile = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.732465] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] barbican.collect_timing = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.732662] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] barbican.insecure = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.732841] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] barbican.keyfile = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.732999] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] barbican.number_of_retries = 60 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.733177] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] barbican.retry_delay = 1 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.733340] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] barbican.send_service_user_token = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.733504] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] barbican.split_loggers = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.733663] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] barbican.timeout = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.733825] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] barbican.verify_ssl = True {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.733984] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] barbican.verify_ssl_path = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.734165] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] barbican_service_user.auth_section = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.734327] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] barbican_service_user.auth_type = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.734487] 
env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] barbican_service_user.cafile = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.734642] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] barbican_service_user.certfile = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.734832] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] barbican_service_user.collect_timing = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.734985] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] barbican_service_user.insecure = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.735163] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] barbican_service_user.keyfile = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.735329] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] barbican_service_user.split_loggers = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.735488] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] barbican_service_user.timeout = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.735680] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vault.approle_role_id = **** {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.735853] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vault.approle_secret_id = **** {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.736032] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vault.kv_mountpoint = secret {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.736198] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vault.kv_path = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.736363] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vault.kv_version = 2 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.736521] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vault.namespace = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.736678] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vault.root_token_id = **** {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.736835] env[62510]: DEBUG oslo_service.service [None 
req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vault.ssl_ca_crt_file = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.737013] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vault.timeout = 60.0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.737182] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vault.use_ssl = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.737353] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.737518] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] keystone.cafile = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.737677] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] keystone.certfile = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.737840] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] keystone.collect_timing = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.737999] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] keystone.connect_retries = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.738173] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] keystone.connect_retry_delay = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.738335] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] keystone.endpoint_override = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.738495] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] keystone.insecure = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.738677] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] keystone.keyfile = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.738846] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] keystone.max_version = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.739009] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] keystone.min_version = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.739178] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] keystone.region_name = None {{(pid=62510) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.739338] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] keystone.retriable_status_codes = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.739496] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] keystone.service_name = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.739666] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] keystone.service_type = identity {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.739832] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] keystone.split_loggers = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.739998] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] keystone.status_code_retries = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.740175] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] keystone.status_code_retry_delay = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.740336] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] keystone.timeout = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.740513] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.740671] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] keystone.version = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.740860] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.ceph_mount_options = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.741201] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.ceph_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.741388] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.connection_uri = {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.741555] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.cpu_mode = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.741754] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.cpu_model_extra_flags = [] {{(pid=62510) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.741933] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.cpu_models = [] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.742127] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.cpu_power_governor_high = performance {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.742302] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.cpu_power_governor_low = powersave {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.742470] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.cpu_power_management = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.742646] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.742814] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.device_detach_attempts = 8 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.742981] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.device_detach_timeout = 20 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.743163] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.disk_cachemodes = [] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.743331] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.disk_prefix = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.743499] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.enabled_perf_events = [] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.743664] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.file_backed_memory = 0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.743837] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.gid_maps = [] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.744007] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.hw_disk_discard = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.744176] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.hw_machine_type = None {{(pid=62510) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.744348] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.images_rbd_ceph_conf = {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.744514] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.744711] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.744910] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.images_rbd_glance_store_name = {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.745105] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.images_rbd_pool = rbd {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.745282] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.images_type = default {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.745444] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.images_volume_group = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.745609] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.inject_key = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.745776] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.inject_partition = -2 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.745953] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.inject_password = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.746148] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.iscsi_iface = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.746318] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.iser_use_multipath = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.746485] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.live_migration_bandwidth = 0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.746649] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=62510) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.746814] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.live_migration_downtime = 500 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.746978] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.747155] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.747317] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.live_migration_inbound_addr = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.747480] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.747669] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.live_migration_permit_post_copy = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.747844] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.live_migration_scheme = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.748037] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.live_migration_timeout_action = abort {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.748209] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.live_migration_tunnelled = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.748372] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.live_migration_uri = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.748538] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.live_migration_with_native_tls = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.748701] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.max_queues = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.748868] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.749132] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] 
libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.749301] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.nfs_mount_options = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.749602] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.749783] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.750045] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.num_iser_scan_tries = 5 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.750233] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.num_memory_encrypted_guests = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.750405] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.750572] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.num_pcie_ports = 0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.750766] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.num_volume_scan_tries = 5 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.750940] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.pmem_namespaces = [] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.751120] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.quobyte_client_cfg = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.751426] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.751610] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.rbd_connect_timeout = 5 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.751771] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.751939] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 
None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.752118] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.rbd_secret_uuid = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.752284] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.rbd_user = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.752445] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.752617] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.remote_filesystem_transport = ssh {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.752779] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.rescue_image_id = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.752937] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.rescue_kernel_id = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.753110] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.rescue_ramdisk_id = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.753284] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.753445] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.rx_queue_size = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.753618] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.smbfs_mount_options = {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.753933] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.754137] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.snapshot_compression = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.754307] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.snapshot_image_format = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.754550] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] 
libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.754721] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.sparse_logical_volumes = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.754908] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.swtpm_enabled = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.755104] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.swtpm_group = tss {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.755276] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.swtpm_user = tss {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.755447] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.sysinfo_serial = unique {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.755609] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.tb_cache_size = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.755768] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.tx_queue_size = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.755956] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.uid_maps = [] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.756154] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.use_virtio_for_bridges = True {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.756334] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.virt_type = kvm {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.756505] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.volume_clear = zero {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.756671] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.volume_clear_size = 0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.756839] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.volume_use_multipath = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.757013] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.vzstorage_cache_path = None {{(pid=62510) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.757189] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.757360] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.vzstorage_mount_group = qemu {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.757530] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.vzstorage_mount_opts = [] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.757701] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.757983] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.758184] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.vzstorage_mount_user = stack {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.758355] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.758526] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] neutron.auth_section = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.758707] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] neutron.auth_type = password {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.758869] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] neutron.cafile = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.759069] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] neutron.certfile = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.759250] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] neutron.collect_timing = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.759414] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] neutron.connect_retries = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.759575] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] neutron.connect_retry_delay = None {{(pid=62510) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.759748] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] neutron.default_floating_pool = public {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.759909] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] neutron.endpoint_override = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.760087] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] neutron.extension_sync_interval = 600 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.760253] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] neutron.http_retries = 3 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.760413] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] neutron.insecure = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.760572] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] neutron.keyfile = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.760732] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] neutron.max_version = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.760904] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.761078] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] neutron.min_version = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.761253] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] neutron.ovs_bridge = br-int {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.761418] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] neutron.physnets = [] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.761588] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] neutron.region_name = RegionOne {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.761756] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] neutron.retriable_status_codes = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.761908] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] neutron.service_metadata_proxy = True {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.762109] 
env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] neutron.service_name = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.762290] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] neutron.service_type = network {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.762459] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] neutron.split_loggers = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.762616] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] neutron.status_code_retries = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.762773] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] neutron.status_code_retry_delay = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.762935] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] neutron.timeout = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.763132] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.763298] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] neutron.version = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.763475] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] notifications.bdms_in_notifications = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.763651] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] notifications.default_level = INFO {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.763844] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] notifications.notification_format = unversioned {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.764030] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] notifications.notify_on_state_change = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.764229] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.764408] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] pci.alias = [] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.764579] 
env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] pci.device_spec = [] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.764741] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] pci.report_in_placement = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.764947] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] placement.auth_section = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.765168] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] placement.auth_type = password {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.765348] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.765512] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] placement.cafile = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.765670] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] placement.certfile = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.765833] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] placement.collect_timing = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.765993] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] placement.connect_retries = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.766166] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] placement.connect_retry_delay = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.766326] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] placement.default_domain_id = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.766482] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] placement.default_domain_name = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.766637] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] placement.domain_id = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.767046] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] placement.domain_name = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.767046] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 
None None] placement.endpoint_override = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.767152] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] placement.insecure = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.767278] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] placement.keyfile = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.767435] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] placement.max_version = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.767591] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] placement.min_version = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.767760] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] placement.password = **** {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.767918] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] placement.project_domain_id = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.768135] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] placement.project_domain_name = Default {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.768311] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] placement.project_id = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.768487] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] placement.project_name = service {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.768659] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] placement.region_name = RegionOne {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.768823] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] placement.retriable_status_codes = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.769041] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] placement.service_name = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.769252] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] placement.service_type = placement {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.769423] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] placement.split_loggers = False {{(pid=62510) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.769584] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] placement.status_code_retries = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.769744] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] placement.status_code_retry_delay = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.769904] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] placement.system_scope = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.770079] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] placement.timeout = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.770242] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] placement.trust_id = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.770402] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] placement.user_domain_id = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.770572] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] placement.user_domain_name = Default {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.770732] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] placement.user_id = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.770915] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] placement.username = nova {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.771135] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.771307] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] placement.version = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.771487] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] quota.cores = 20 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.771651] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] quota.count_usage_from_placement = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.771837] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} 
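The DEBUG lines above are produced by oslo.config's ConfigOpts.log_opt_values() (the cfg.py:2824 call site cited in every entry), which the service invokes at startup to dump every registered option as "group.option = value", masking options registered with secret=True (for example key_manager.fixed_key or placement.password) as '****'. A minimal sketch of that mechanism, assuming only that oslo.config is installed; the option names below are illustrative stand-ins, not Nova's full option definitions:

# Sketch: reproduce the "group.option = value" DEBUG dump seen above
# using oslo.config. Illustrative options only, not Nova's real set.
import logging

from oslo_config import cfg

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)

CONF = cfg.ConfigOpts()

# Register a couple of example options under named groups, the way the
# service registers hundreds of options under [libvirt], [placement], etc.
CONF.register_opts(
    [
        cfg.StrOpt('virt_type', default='kvm'),
        cfg.BoolOpt('volume_use_multipath', default=False),
    ],
    group='libvirt',
)
# Options flagged secret=True are logged as '****' rather than their value.
CONF.register_opts(
    [cfg.StrOpt('fixed_key', secret=True)],
    group='key_manager',
)

# Parse an empty command line (no config files required), then dump every
# registered option at DEBUG level, yielding lines such as
# "libvirt.virt_type = kvm" and "key_manager.fixed_key = ****".
CONF(args=[], project='nova')
CONF.log_opt_values(LOG, logging.DEBUG)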
[ 610.772023] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] quota.injected_file_content_bytes = 10240 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.772189] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] quota.injected_file_path_length = 255 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.772356] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] quota.injected_files = 5 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.772521] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] quota.instances = 10 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.772687] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] quota.key_pairs = 100 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.772884] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] quota.metadata_items = 128 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.773047] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] quota.ram = 51200 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.773213] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] quota.recheck_quota = True {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.773379] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] quota.server_group_members = 10 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.773545] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] quota.server_groups = 10 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.773755] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] quota.unified_limits_resource_list = ['servers'] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.773931] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] quota.unified_limits_resource_strategy = require {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.774144] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.774319] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.774483] env[62510]: DEBUG oslo_service.service 
[None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] scheduler.image_metadata_prefilter = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.774643] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.774837] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] scheduler.max_attempts = 3 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.774997] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] scheduler.max_placement_results = 1000 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.775179] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.775344] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] scheduler.query_placement_for_image_type_support = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.775505] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.775679] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] scheduler.workers = 2 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.775853] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.776037] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.776221] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.776392] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.776557] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.776723] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.776887] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.777111] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.777298] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] filter_scheduler.host_subset_size = 1 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.777465] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.777628] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.777792] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.777957] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] filter_scheduler.isolated_hosts = [] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.778138] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] filter_scheduler.isolated_images = [] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.778300] env[62510]: DEBUG oslo_service.service [None 
req-2aed77c1-8466-4130-8371-2094225db0b2 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.778459] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.778624] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.778785] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] filter_scheduler.pci_in_placement = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.778959] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.779140] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.779301] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.779459] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.779619] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.779780] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.779937] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] filter_scheduler.track_instance_changes = True {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.780159] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.780337] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] metrics.required = True {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.780503] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] metrics.weight_multiplier = 1.0 
{{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.780663] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.780827] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] metrics.weight_setting = [] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.781164] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.781343] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] serial_console.enabled = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.781520] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] serial_console.port_range = 10000:20000 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.781692] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.781863] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.782044] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] serial_console.serialproxy_port = 6083 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.782216] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] service_user.auth_section = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.782389] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] service_user.auth_type = password {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.782549] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] service_user.cafile = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.782709] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] service_user.certfile = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.782875] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] service_user.collect_timing = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.783122] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] service_user.insecure = False {{(pid=62510) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.783358] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] service_user.keyfile = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.783590] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] service_user.send_service_user_token = True {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.783784] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] service_user.split_loggers = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.783952] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] service_user.timeout = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.784143] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] spice.agent_enabled = True {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.784309] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] spice.enabled = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.784628] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.784851] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.785051] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] spice.html5proxy_port = 6082 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.785228] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] spice.image_compression = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.785390] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] spice.jpeg_compression = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.785551] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] spice.playback_compression = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.785712] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] spice.require_secure = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.785883] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] spice.server_listen = 127.0.0.1 {{(pid=62510) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.786095] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.786266] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] spice.streaming_mode = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.786427] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] spice.zlib_compression = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.786594] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] upgrade_levels.baseapi = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.786764] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] upgrade_levels.compute = auto {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.786923] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] upgrade_levels.conductor = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.787096] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] upgrade_levels.scheduler = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.787263] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vendordata_dynamic_auth.auth_section = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.787423] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vendordata_dynamic_auth.auth_type = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.787580] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vendordata_dynamic_auth.cafile = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.787740] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vendordata_dynamic_auth.certfile = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.787905] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.788078] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vendordata_dynamic_auth.insecure = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.788240] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vendordata_dynamic_auth.keyfile = None {{(pid=62510) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.788400] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.788559] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vendordata_dynamic_auth.timeout = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.788732] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vmware.api_retry_count = 10 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.788891] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vmware.ca_file = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.789110] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vmware.cache_prefix = devstack-image-cache {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.789293] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vmware.cluster_name = testcl1 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.789460] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vmware.connection_pool_size = 10 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.789621] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vmware.console_delay_seconds = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.789791] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vmware.datastore_regex = ^datastore.* {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.790009] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.790197] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vmware.host_password = **** {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.790364] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vmware.host_port = 443 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.790533] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vmware.host_username = administrator@vsphere.local {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.790701] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vmware.insecure = True {{(pid=62510) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.790863] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vmware.integration_bridge = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.791042] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vmware.maximum_objects = 100 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.791210] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vmware.pbm_default_policy = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.791371] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vmware.pbm_enabled = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.791527] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vmware.pbm_wsdl_location = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.791698] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.791857] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vmware.serial_port_proxy_uri = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.792071] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vmware.serial_port_service_uri = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.792235] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vmware.task_poll_interval = 0.5 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.792414] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vmware.use_linked_clone = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.792587] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vmware.vnc_keymap = en-us {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.792751] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vmware.vnc_port = 5900 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.792914] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vmware.vnc_port_total = 10000 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.793121] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vnc.auth_schemes = ['none'] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.793303] 
env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vnc.enabled = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.793599] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.793785] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.793957] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vnc.novncproxy_port = 6080 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.794213] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vnc.server_listen = 127.0.0.1 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.794423] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.794592] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vnc.vencrypt_ca_certs = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.794754] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vnc.vencrypt_client_cert = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.794945] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vnc.vencrypt_client_key = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.795145] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.795314] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] workarounds.disable_deep_image_inspection = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.795476] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.795638] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.795803] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=62510) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.795965] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] workarounds.disable_rootwrap = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.796144] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] workarounds.enable_numa_live_migration = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.796311] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.796472] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.796633] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.796794] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] workarounds.libvirt_disable_apic = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.796955] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.797155] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.797342] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.797505] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.797667] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.797827] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.797990] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.798166] 
env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.798327] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.798491] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.798678] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.798846] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] wsgi.client_socket_timeout = 900 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.799024] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] wsgi.default_pool_size = 1000 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.799196] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] wsgi.keep_alive = True {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.799364] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] wsgi.max_header_line = 16384 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.799526] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] wsgi.secure_proxy_ssl_header = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.799689] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] wsgi.ssl_ca_file = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.799851] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] wsgi.ssl_cert_file = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.800025] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] wsgi.ssl_key_file = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.800214] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] wsgi.tcp_keepidle = 600 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.800403] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=62510) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.800570] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] zvm.ca_file = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.800732] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] zvm.cloud_connector_url = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.801040] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.801224] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] zvm.reachable_timeout = 300 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.801399] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.801576] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.801752] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] profiler.connection_string = messaging:// {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.801919] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] profiler.enabled = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.802101] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] profiler.es_doc_type = notification {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.802268] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] profiler.es_scroll_size = 10000 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.802435] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] profiler.es_scroll_time = 2m {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.802596] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] profiler.filter_error_trace = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.802761] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] profiler.hmac_keys = **** {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.802936] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] profiler.sentinel_service_name = mymaster {{(pid=62510) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.803105] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] profiler.socket_timeout = 0.1 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.803296] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] profiler.trace_requests = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.803467] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] profiler.trace_sqlalchemy = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.803658] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] profiler_jaeger.process_tags = {} {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.803821] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] profiler_jaeger.service_name_prefix = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.804038] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] profiler_otlp.service_name_prefix = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.804169] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] remote_debug.host = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.804331] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] remote_debug.port = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.804510] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.804672] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.804857] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.805045] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.805219] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.805380] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False 
{{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.805540] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.805701] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.805862] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.806073] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.806315] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.806416] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.806579] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.806749] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.806919] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.807101] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.807267] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.807440] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.807603] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=62510) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.807764] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.807924] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.808102] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.808267] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.808433] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.808595] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.808757] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.808918] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.809118] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.809297] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.809462] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_rabbit.ssl = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.809633] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.809801] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.809965] env[62510]: DEBUG 
oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.810149] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.810319] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_rabbit.ssl_version = {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.810481] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.810680] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.810850] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_notifications.retry = -1 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.811045] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.811285] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_messaging_notifications.transport_url = **** {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.811475] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_limit.auth_section = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.811642] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_limit.auth_type = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.811819] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_limit.cafile = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.811984] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_limit.certfile = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.812165] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_limit.collect_timing = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.812326] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_limit.connect_retries = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
610.812479] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_limit.connect_retry_delay = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.812635] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_limit.endpoint_id = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.812806] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_limit.endpoint_interface = publicURL {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.812965] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_limit.endpoint_override = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.813138] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_limit.endpoint_region_name = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.813297] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_limit.endpoint_service_name = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.813457] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_limit.endpoint_service_type = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.813620] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_limit.insecure = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.813781] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_limit.keyfile = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.813934] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_limit.max_version = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.814108] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_limit.min_version = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.814290] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_limit.region_name = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.814455] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_limit.retriable_status_codes = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.814612] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_limit.service_name = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.814769] env[62510]: DEBUG 
oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_limit.service_type = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.814959] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_limit.split_loggers = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.815139] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_limit.status_code_retries = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.815299] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_limit.status_code_retry_delay = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.815454] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_limit.timeout = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.815611] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_limit.valid_interfaces = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.815764] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_limit.version = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.815946] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_reports.file_event_handler = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.816143] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.816305] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] oslo_reports.log_dir = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.816477] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.816636] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.816797] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.816964] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=62510) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.817158] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.817341] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.817518] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.817679] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vif_plug_ovs_privileged.group = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.817838] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.818013] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.818184] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.818347] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] vif_plug_ovs_privileged.user = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.818516] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] os_vif_linux_bridge.flat_interface = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.818696] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.818871] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.819055] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.819230] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.819397] env[62510]: DEBUG oslo_service.service [None 
req-2aed77c1-8466-4130-8371-2094225db0b2 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.819561] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.819725] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.819899] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.820085] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] os_vif_ovs.isolate_vif = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.820280] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.820455] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.820627] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.820800] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] os_vif_ovs.ovsdb_interface = native {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.820964] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] os_vif_ovs.per_port_bridge = False {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.821151] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] privsep_osbrick.capabilities = [21] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.821308] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] privsep_osbrick.group = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.821466] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] privsep_osbrick.helper_command = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.821630] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.821792] env[62510]: DEBUG 
oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.821951] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] privsep_osbrick.user = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.822138] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.822296] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] nova_sys_admin.group = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.822452] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] nova_sys_admin.helper_command = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.822617] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.822775] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.822932] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] nova_sys_admin.user = None {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 610.823073] env[62510]: DEBUG oslo_service.service [None req-2aed77c1-8466-4130-8371-2094225db0b2 None None] ******************************************************************************** {{(pid=62510) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 610.823531] env[62510]: INFO nova.service [-] Starting compute node (version 0.1.0) [ 611.327368] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-be6b2210-19dc-4e16-9dce-880d8e7fd694 None None] Getting list of instances from cluster (obj){ [ 611.327368] env[62510]: value = "domain-c8" [ 611.327368] env[62510]: _type = "ClusterComputeResource" [ 611.327368] env[62510]: } {{(pid=62510) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 611.328477] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ab250f1-e404-447c-a14c-5f9c90eac9a4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.337251] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-be6b2210-19dc-4e16-9dce-880d8e7fd694 None None] Got total of 0 instances {{(pid=62510) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 611.337828] env[62510]: WARNING nova.virt.vmwareapi.driver [None req-be6b2210-19dc-4e16-9dce-880d8e7fd694 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. 
It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 611.338317] env[62510]: INFO nova.virt.node [None req-be6b2210-19dc-4e16-9dce-880d8e7fd694 None None] Generated node identity c3653102-341b-4ed1-8b1f-1abaf8aa3e56 [ 611.338549] env[62510]: INFO nova.virt.node [None req-be6b2210-19dc-4e16-9dce-880d8e7fd694 None None] Wrote node identity c3653102-341b-4ed1-8b1f-1abaf8aa3e56 to /opt/stack/data/n-cpu-1/compute_id [ 611.841626] env[62510]: WARNING nova.compute.manager [None req-be6b2210-19dc-4e16-9dce-880d8e7fd694 None None] Compute nodes ['c3653102-341b-4ed1-8b1f-1abaf8aa3e56'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 612.848155] env[62510]: INFO nova.compute.manager [None req-be6b2210-19dc-4e16-9dce-880d8e7fd694 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 613.854144] env[62510]: WARNING nova.compute.manager [None req-be6b2210-19dc-4e16-9dce-880d8e7fd694 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 613.854546] env[62510]: DEBUG oslo_concurrency.lockutils [None req-be6b2210-19dc-4e16-9dce-880d8e7fd694 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 613.854672] env[62510]: DEBUG oslo_concurrency.lockutils [None req-be6b2210-19dc-4e16-9dce-880d8e7fd694 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 613.854851] env[62510]: DEBUG oslo_concurrency.lockutils [None req-be6b2210-19dc-4e16-9dce-880d8e7fd694 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 613.855026] env[62510]: DEBUG nova.compute.resource_tracker [None req-be6b2210-19dc-4e16-9dce-880d8e7fd694 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62510) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 613.855944] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1b90eb0-7034-468f-8892-fe1521b44ada {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.864107] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c8a5fd0-0796-471e-b945-f2f35149538c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.878778] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-997ef234-b807-4614-8d86-4332a580830a {{(pid=62510) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.885162] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b2323dd-fbe3-4f2c-b5d8-43569826c9e8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.913675] env[62510]: DEBUG nova.compute.resource_tracker [None req-be6b2210-19dc-4e16-9dce-880d8e7fd694 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181401MB free_disk=167GB free_vcpus=48 pci_devices=None {{(pid=62510) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 613.913828] env[62510]: DEBUG oslo_concurrency.lockutils [None req-be6b2210-19dc-4e16-9dce-880d8e7fd694 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 613.914044] env[62510]: DEBUG oslo_concurrency.lockutils [None req-be6b2210-19dc-4e16-9dce-880d8e7fd694 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 614.417248] env[62510]: WARNING nova.compute.resource_tracker [None req-be6b2210-19dc-4e16-9dce-880d8e7fd694 None None] No compute node record for cpu-1:c3653102-341b-4ed1-8b1f-1abaf8aa3e56: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host c3653102-341b-4ed1-8b1f-1abaf8aa3e56 could not be found. [ 614.921372] env[62510]: INFO nova.compute.resource_tracker [None req-be6b2210-19dc-4e16-9dce-880d8e7fd694 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 [ 616.430588] env[62510]: DEBUG nova.compute.resource_tracker [None req-be6b2210-19dc-4e16-9dce-880d8e7fd694 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 616.430985] env[62510]: DEBUG nova.compute.resource_tracker [None req-be6b2210-19dc-4e16-9dce-880d8e7fd694 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 616.619840] env[62510]: INFO nova.scheduler.client.report [None req-be6b2210-19dc-4e16-9dce-880d8e7fd694 None None] [req-792f0c22-66ec-4d80-8926-5a16489d3da2] Created resource provider record via placement API for resource provider with UUID c3653102-341b-4ed1-8b1f-1abaf8aa3e56 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
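At this point the resource tracker has registered the compute node as resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 in the placement API, and the entries that follow show the inventory it reports (48 VCPU at a 4.0 allocation ratio, 196590 MB of RAM with 512 MB reserved, 400 GB of disk). As a minimal sketch only, the inventory a provider like this exposes could be read back through keystoneauth1 roughly as below; the auth URL and credentials are placeholders for illustration, not values from this deployment.

    # Sketch: read back a resource provider's inventory from placement.
    # Credential values are placeholders; only the provider UUID comes from the log above.
    from keystoneauth1 import loading, session

    loader = loading.get_plugin_loader('password')
    auth = loader.load_from_options(
        auth_url='http://keystone.example/identity/v3',  # placeholder endpoint
        username='admin', password='secret',             # placeholder credentials
        project_name='admin',
        user_domain_id='default', project_domain_id='default')
    sess = session.Session(auth=auth)

    provider = 'c3653102-341b-4ed1-8b1f-1abaf8aa3e56'    # UUID generated in this log
    resp = sess.get('/resource_providers/%s/inventories' % provider,
                    endpoint_filter={'service_type': 'placement'})
    # The keys mirror the logged inventory: VCPU, MEMORY_MB and DISK_GB, each with
    # total/reserved/min_unit/max_unit/step_size/allocation_ratio fields.
    print(resp.json()['inventories'])
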
[ 616.642625] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f7bacf1-6021-4289-9f6e-0826a5811973 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.650542] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d7c658f-b52b-443a-ab28-60f58f0b6a18 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.680837] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57d9b1b4-4020-4433-87db-81b512da6e91 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.688199] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c22c9f9-b3b1-41e4-8a00-9902364fbf7f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.701278] env[62510]: DEBUG nova.compute.provider_tree [None req-be6b2210-19dc-4e16-9dce-880d8e7fd694 None None] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 167, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 617.238214] env[62510]: DEBUG nova.scheduler.client.report [None req-be6b2210-19dc-4e16-9dce-880d8e7fd694 None None] Updated inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 167, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 617.238443] env[62510]: DEBUG nova.compute.provider_tree [None req-be6b2210-19dc-4e16-9dce-880d8e7fd694 None None] Updating resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 generation from 0 to 1 during operation: update_inventory {{(pid=62510) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 617.238584] env[62510]: DEBUG nova.compute.provider_tree [None req-be6b2210-19dc-4e16-9dce-880d8e7fd694 None None] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 167, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 617.285302] env[62510]: DEBUG nova.compute.provider_tree [None req-be6b2210-19dc-4e16-9dce-880d8e7fd694 None None] Updating 
resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 generation from 1 to 2 during operation: update_traits {{(pid=62510) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 617.790154] env[62510]: DEBUG nova.compute.resource_tracker [None req-be6b2210-19dc-4e16-9dce-880d8e7fd694 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62510) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 617.790473] env[62510]: DEBUG oslo_concurrency.lockutils [None req-be6b2210-19dc-4e16-9dce-880d8e7fd694 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.876s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 617.790509] env[62510]: DEBUG nova.service [None req-be6b2210-19dc-4e16-9dce-880d8e7fd694 None None] Creating RPC server for service compute {{(pid=62510) start /opt/stack/nova/nova/service.py:186}} [ 617.805710] env[62510]: DEBUG nova.service [None req-be6b2210-19dc-4e16-9dce-880d8e7fd694 None None] Join ServiceGroup membership for this service compute {{(pid=62510) start /opt/stack/nova/nova/service.py:203}} [ 617.805970] env[62510]: DEBUG nova.servicegroup.drivers.db [None req-be6b2210-19dc-4e16-9dce-880d8e7fd694 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=62510) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 627.808871] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._sync_power_states {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 628.312113] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Getting list of instances from cluster (obj){ [ 628.312113] env[62510]: value = "domain-c8" [ 628.312113] env[62510]: _type = "ClusterComputeResource" [ 628.312113] env[62510]: } {{(pid=62510) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 628.313315] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b18144af-4b08-4804-86af-a0b33f8c2247 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.322091] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Got total of 0 instances {{(pid=62510) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 628.322323] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 628.322611] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Getting list of instances from cluster (obj){ [ 628.322611] env[62510]: value = "domain-c8" [ 628.322611] env[62510]: _type = "ClusterComputeResource" [ 628.322611] env[62510]: } {{(pid=62510) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 628.323497] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90c0ba48-704e-46bc-ac2e-06b66db20abf 
{{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.330626] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Got total of 0 instances {{(pid=62510) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 666.217572] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.217572] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.217572] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Starting heal instance info cache {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 666.217572] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Rebuilding the list of instances to heal {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10313}} [ 666.722401] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Didn't find any instances for network info cache update. {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10395}} [ 666.722401] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.722401] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.722401] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.722401] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.722401] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.722810] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 666.722810] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] 
CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62510) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 666.722810] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 667.226183] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 667.226183] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 667.226183] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 667.226183] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62510) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 667.226183] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15f2e031-befd-4e8c-b85f-aada802cb712 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.234302] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ce57f76-99f6-40ce-b799-0c5c05ff8dd7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.247991] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e8ef14b-7d61-42d3-92a0-0a8e10f3ff35 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.253996] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dba372f4-30ee-4e44-8754-7563a1123865 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.282385] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181401MB free_disk=167GB free_vcpus=48 pci_devices=None {{(pid=62510) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 667.282525] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62510) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 667.282693] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 668.300487] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 668.300809] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 668.316056] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4b8daac-25c7-4962-8535-8438a5c7b05a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.324287] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0137ce2c-b502-4ade-b595-3bed28abd90e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.352957] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdd70759-2c4e-47ab-af91-635f97fd8732 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.360158] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10d4257b-a84c-42dd-afdc-d804a09febaa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.372806] env[62510]: DEBUG nova.compute.provider_tree [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 668.876270] env[62510]: DEBUG nova.scheduler.client.report [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 167, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 669.381767] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62510) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 669.382127] env[62510]: DEBUG oslo_concurrency.lockutils [None 
req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.099s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 729.368761] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 729.369169] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 729.877021] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 729.877021] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Starting heal instance info cache {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 729.877021] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Rebuilding the list of instances to heal {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10313}} [ 730.376635] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Didn't find any instances for network info cache update. 
{{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10395}} [ 730.377052] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 730.377052] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 730.377211] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 730.377360] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 730.377499] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 730.377674] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 730.377808] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62510) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 730.377948] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 730.880773] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.881031] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.881199] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 730.881354] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62510) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 730.882278] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc66c710-1a40-49e6-a5e0-538088a4e068 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.890563] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0de1178-6513-42f8-972d-82f6f91fdf54 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.903891] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b1b5f8a-fa04-44fd-b796-374b1b7f6eff {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.910387] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2956d187-8a1e-4e22-a7b8-9758eaaddf50 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.938838] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181383MB free_disk=167GB free_vcpus=48 pci_devices=None {{(pid=62510) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 730.938981] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 730.939187] 
env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 731.961779] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 731.962037] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 731.978028] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab57ccd4-1f0c-493e-9863-9cd4c609f78c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.983253] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-205bd707-7032-4600-9c47-c423634c0906 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.012487] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c57ef82-e275-4b0d-9fe6-bc9081d31130 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.020066] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d09d00d-f3f3-4df5-b9fb-885dbed1b61b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.032770] env[62510]: DEBUG nova.compute.provider_tree [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 732.535642] env[62510]: DEBUG nova.scheduler.client.report [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 167, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 732.536894] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62510) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 732.537083] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.598s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.538671] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 792.539144] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 792.539144] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Starting heal instance info cache {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 792.539268] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Rebuilding the list of instances to heal {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10313}} [ 793.042950] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Didn't find any instances for network info cache update. {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10395}} [ 793.043230] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 793.043368] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 793.043544] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 793.043704] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 793.043847] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 793.044012] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 793.044142] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62510) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 793.044282] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 793.548061] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.548061] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 793.548061] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 793.548061] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62510) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 793.548528] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d64062a9-e401-47bf-8767-de5e3c34065c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.556914] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16272c1b-7e78-4d27-8f81-5d9e78e101b9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.570313] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6abc72e0-89df-4b9f-be17-959550aaa53a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.576229] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcefa040-d7fe-44cb-a85a-88afbc4fb7ba {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.604705] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181392MB free_disk=167GB free_vcpus=48 pci_devices=None {{(pid=62510) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 793.604836] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.605017] 
env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 794.626506] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 794.626746] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 794.639492] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4b4ad28-efa3-485f-ae82-0a5aa4bde4d1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.646625] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-903509fd-d57e-42a8-92e6-3bedc76b2bb8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.675216] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df461edb-fcac-45d2-9b28-8d45ed1288f3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.681644] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-801ac900-b512-4758-9c1a-f3b88586cc53 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.694011] env[62510]: DEBUG nova.compute.provider_tree [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 795.197135] env[62510]: DEBUG nova.scheduler.client.report [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 167, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 795.198449] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62510) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 795.198639] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.594s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.863575] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 849.370593] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 849.370593] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Starting heal instance info cache {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 849.370593] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Rebuilding the list of instances to heal {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10313}} [ 849.872606] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Didn't find any instances for network info cache update. {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10395}} [ 849.873040] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 849.873172] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 849.873303] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 849.873437] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62510) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 849.873582] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 850.376921] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 850.377179] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 850.377332] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.377531] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62510) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 850.378436] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f4fedc4-6d75-4d23-bb60-ef5f24e37bee {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.386665] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6e1f8ce-1d4b-4901-9c5c-513cc916b1cc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.400352] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4afb507-34fe-4651-a9cc-3b6e3487a660 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.406124] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44c87b87-73e8-403b-8033-cfbe1a53f3cc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.433597] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181388MB free_disk=167GB free_vcpus=48 pci_devices=None {{(pid=62510) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 850.433734] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 850.433917] 
env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.451347] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 851.451585] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 851.464499] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdf9fa41-47f8-4ed7-a332-b62881f921ab {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.472133] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-997b8977-ccf1-4880-aa85-01dc30056e6b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.501721] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ba22c33-5ffa-4b95-85c5-c3a704875190 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.508302] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37b2597f-beed-4a58-8188-bc57c74c903f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.520854] env[62510]: DEBUG nova.compute.provider_tree [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 852.023698] env[62510]: DEBUG nova.scheduler.client.report [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 167, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 852.025176] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62510) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 852.025404] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.591s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 852.359571] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 852.359801] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 852.359957] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 852.360146] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 906.206985] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 906.207393] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Cleaning up deleted instances {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11609}} [ 906.710609] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] There are 0 instances to clean {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11618}} [ 906.710843] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 906.710984] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Cleaning up deleted instances with incomplete migration {{(pid=62510) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11647}} [ 907.213579] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 908.716579] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 908.716961] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Starting heal instance info cache {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 908.716961] 
env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Rebuilding the list of instances to heal {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10313}} [ 909.220136] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Didn't find any instances for network info cache update. {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10395}} [ 909.221790] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 909.221790] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 909.221790] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 909.724069] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.724437] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.724476] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.724643] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62510) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 909.725556] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ba3a651-92a8-4d5c-b28b-1312b550ff5c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.733893] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-253e91b2-74a8-45c1-af1b-825034eb0365 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.747593] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebf128e8-d842-4552-98a7-5d66888a5630 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
909.753652] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb9772b8-bf93-4b2c-8fbc-37da417aef2d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.781190] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181393MB free_disk=167GB free_vcpus=48 pci_devices=None {{(pid=62510) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 909.781326] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.781512] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.799784] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 910.800052] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 910.812271] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd5b402e-32ac-492b-9d22-6502ff5e16a8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.820244] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6287342d-4af2-4a78-a4ae-a365ca48f758 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.849032] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7fdb627-15b2-42b5-86f7-dfb0799fd6dd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.855689] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ac10639-3ee9-4602-932b-fb29517dca62 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.868255] env[62510]: DEBUG nova.compute.provider_tree [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 911.372017] env[62510]: DEBUG nova.scheduler.client.report [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Inventory has not changed for provider 
c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 167, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 911.373380] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62510) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 911.373580] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 912.360358] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 912.360772] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 912.360772] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 912.361125] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 912.361125] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 912.361315] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] CONF.reclaim_instance_interval <= 0, skipping... 
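The inventory payload repeated in these "Inventory has not changed for provider ..." entries is what the resource tracker reports to placement for this node. As a small worked example using the values logged above (plain arithmetic on the logged data, not nova code), placement's capacity check per resource class is roughly (total - reserved) * allocation_ratio:

# Worked example: schedulable capacity implied by the inventory logged above.
# Placement's capacity check is roughly (total - reserved) * allocation_ratio.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f'{rc}: {capacity:g}')
# -> VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400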
{{(pid=62510) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 968.203085] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 968.709067] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 970.207129] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 970.207517] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 970.207617] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Starting heal instance info cache {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 970.207747] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Rebuilding the list of instances to heal {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10313}} [ 970.710847] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Didn't find any instances for network info cache update. 
{{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10395}} [ 970.711161] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 971.214048] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 971.214351] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 971.214351] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 971.214417] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62510) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 971.215623] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09c1d521-039e-4e1e-bde7-bc1daa9272c3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.223768] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69544edb-cfd7-4ba6-ba35-256a7ef2bf1c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.237550] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32113039-b824-4f44-9b38-145a0f09de59 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.243605] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb757e20-6b6f-4af2-9815-4a40cc1e440c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.271739] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181390MB free_disk=167GB free_vcpus=48 pci_devices=None {{(pid=62510) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 971.271875] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 971.272061] 
env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.306580] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 972.306828] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 972.324919] env[62510]: DEBUG nova.scheduler.client.report [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Refreshing inventories for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 972.339357] env[62510]: DEBUG nova.scheduler.client.report [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Updating ProviderTree inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 167, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 972.339529] env[62510]: DEBUG nova.compute.provider_tree [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 167, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 972.351333] env[62510]: DEBUG nova.scheduler.client.report [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Refreshing aggregate associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, aggregates: None {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 972.366997] env[62510]: DEBUG nova.scheduler.client.report [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Refreshing trait associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 972.378356] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-8abb5b6a-fd00-4ff1-8bb7-20a19dd9c5ae {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.385587] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06f8577c-608a-4e7d-b35e-5dd6c2b39205 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.415620] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e476285-9ef9-4447-9f02-b24f952456b1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.422489] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43f6a434-4cbf-4a82-a59c-588bbe4a909a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.435489] env[62510]: DEBUG nova.compute.provider_tree [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 972.940735] env[62510]: DEBUG nova.scheduler.client.report [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 167, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 972.940735] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62510) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 972.940735] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.669s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.437315] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 973.437648] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 973.437685] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 973.437825] env[62510]: DEBUG oslo_service.periodic_task [None 
req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 973.437971] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 973.438120] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62510) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 1028.207758] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1030.203564] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1031.206719] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1032.207153] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1032.208189] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Starting heal instance info cache {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 1032.208189] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Rebuilding the list of instances to heal {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10313}} [ 1032.710536] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Didn't find any instances for network info cache update. {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10395}} [ 1032.710792] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1032.710937] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1032.711091] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] CONF.reclaim_instance_interval <= 0, skipping... 
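The "Running periodic task ComputeManager.<name>" entries interleaved above (_heal_instance_info_cache, _poll_unconfirmed_resizes, _reclaim_queued_deletes, update_available_resource, and so on) are driven by oslo.service's periodic-task machinery. A minimal sketch of that pattern follows, assuming only the public oslo.service and oslo.config APIs; DemoManager and its task bodies are invented for illustration and are not nova's ComputeManager.

# Minimal sketch of the oslo.service periodic-task pattern behind the
# "Running periodic task ..." entries. DemoManager is an illustrative
# stand-in, not nova code.
from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF
CONF([], project='demo')  # parse an empty config so option lookups succeed


class DemoManager(periodic_task.PeriodicTasks):
    def __init__(self):
        super().__init__(CONF)

    @periodic_task.periodic_task  # default spacing: eligible on every pass
    def _poll_something(self, context):
        print('polling...')

    @periodic_task.periodic_task(spacing=60)  # at most once per 60 seconds
    def _audit_resources(self, context):
        print('auditing resources...')


if __name__ == '__main__':
    # A real service drives this from a looping call; one pass is enough here.
    DemoManager().run_periodic_tasks(context=None)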
{{(pid=62510) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 1032.711234] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1033.214527] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1033.214892] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1033.214892] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1033.215064] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62510) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1033.216509] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36f1df62-b5a6-4186-be47-1f1dab486ad8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.225670] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edd36b57-57f2-4f60-9f24-65a85f3295dc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.239257] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bb96c1c-01f5-4c6b-93b8-334d91353b05 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.245078] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfdad7ef-81df-40d4-9911-236cc3fc2fc9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.272648] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181369MB free_disk=167GB free_vcpus=48 pci_devices=None {{(pid=62510) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1033.272786] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1033.272970] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1034.290242] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1034.290502] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1034.303941] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f84fd3ae-1a64-4939-acf0-327121ec41cc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.311709] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c9f19bd-f680-43b4-9a94-f9eaa2f2c55b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.341511] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95c9e435-0cd1-4958-9776-9e9fc9172417 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.348162] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-117dec1b-9885-4ea2-babf-4e3fafb11dec {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.361453] env[62510]: DEBUG nova.compute.provider_tree [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1034.864746] env[62510]: DEBUG nova.scheduler.client.report [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 167, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1034.866101] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62510) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1034.866285] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.593s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1035.362981] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1035.363363] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1088.204554] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1090.207918] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1092.203124] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1092.206800] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1092.206988] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1092.207148] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62510) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 1093.207647] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1093.208096] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1093.712057] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1093.712057] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.712057] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.712297] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62510) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1093.713024] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd2d86ae-3d5b-4399-94fb-39a9615f09c9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.721487] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3d68f8f-c627-4d34-8587-0a9b694b9409 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.735834] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-964b1987-aa1a-496a-acc0-2a065ef0f340 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.741945] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7088db5f-0f39-4a46-acb3-0db251413694 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.769537] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181390MB free_disk=167GB free_vcpus=48 pci_devices=None {{(pid=62510) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1093.769705] env[62510]: DEBUG 
oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1093.769884] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1094.787690] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1094.787936] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1094.800342] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4ed024b-fd4f-4718-abb9-94f90cd35991 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.807824] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fa3e22f-ef1b-4fb7-93f2-a1f143e5ccce {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.837592] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8cb6a39-020f-470e-9cc2-9649d6caf36a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.844209] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8679a23a-dc9f-4ad1-a80f-233698764833 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.857096] env[62510]: DEBUG nova.compute.provider_tree [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1095.359927] env[62510]: DEBUG nova.scheduler.client.report [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 167, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1095.361220] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Compute_service record updated for 
cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62510) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1095.361397] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1096.361250] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1096.361723] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Starting heal instance info cache {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 1096.361723] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Rebuilding the list of instances to heal {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10313}} [ 1096.864586] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Didn't find any instances for network info cache update. {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10395}} [ 1096.864822] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1096.864986] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1150.208155] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1152.207470] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1152.207917] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1152.207917] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] CONF.reclaim_instance_interval <= 0, skipping... 
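Each audit pass above brackets its work in the in-process "compute_resources" lock, which is what produces the 'Acquiring lock ... / acquired ... waited N.NNNs / released ... held N.NNNs' lines via oslo.concurrency. A minimal sketch of that locking pattern follows; do_resource_audit() is a made-up stand-in, not the resource tracker's actual method.

# Minimal sketch of the oslo.concurrency pattern behind the acquire/release
# entries for the "compute_resources" lock above. do_resource_audit() is an
# invented stand-in, not nova code.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def do_resource_audit():
    # Runs with the in-process "compute_resources" lock held, so concurrent
    # periodic tasks cannot mutate shared tracker state at the same time.
    print('auditing under lock')


def do_resource_audit_inline():
    # Equivalent context-manager form of the same lock.
    with lockutils.lock('compute_resources'):
        print('auditing under lock (context manager)')


if __name__ == '__main__':
    do_resource_audit()
    do_resource_audit_inline()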
{{(pid=62510) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 1154.203630] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1154.206156] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1154.206308] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Starting heal instance info cache {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 1154.206425] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Rebuilding the list of instances to heal {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10313}} [ 1154.709947] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Didn't find any instances for network info cache update. {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10395}} [ 1154.710201] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1154.710366] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1155.207231] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1155.710622] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1155.710842] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1155.711022] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1155.711202] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Auditing locally available compute resources for cpu-1 (node: 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62510) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1155.712087] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40df22a2-e2db-425a-8195-bd5da475551e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.720045] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0528ed84-8984-4261-a9f9-a6f6879cf820 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.733891] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4ad6e5b-485e-4fff-9803-d334d0e0ad4e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.740056] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-384272de-5d21-41c7-bb09-2fee09721379 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.769551] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181390MB free_disk=167GB free_vcpus=48 pci_devices=None {{(pid=62510) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1155.769739] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1155.769909] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1156.790216] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1156.790532] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1156.804868] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7383119-6ed7-4c98-b1d4-408941100a9e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.812492] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c42ccdd4-f1f4-4c84-af96-f24daf2b3dd8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.841100] env[62510]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1e4ec3b-28a5-429b-8c61-741a17e2d9ad {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.847688] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d417634-0ed9-482f-8750-d6f104cca0a1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.860985] env[62510]: DEBUG nova.compute.provider_tree [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1157.364398] env[62510]: DEBUG nova.scheduler.client.report [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 167, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1157.365651] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62510) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1157.365830] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.596s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1158.365935] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1209.208871] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1210.710629] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1211.202879] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1213.207051] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62510) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1213.207051] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62510) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 1213.207507] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1213.207507] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Cleaning up deleted instances with incomplete migration {{(pid=62510) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11647}} [ 1214.710800] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1214.711289] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Starting heal instance info cache {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 1214.711289] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Rebuilding the list of instances to heal {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10313}} [ 1215.214345] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Didn't find any instances for network info cache update. 
{{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10395}} [ 1215.214578] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1215.214715] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1215.214868] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1215.717961] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1215.718358] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1215.718399] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1215.718557] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62510) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1215.719917] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f25f8098-b4ab-4cec-a86a-60537959ee02 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.728300] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d639ab52-8adf-43f7-a2af-8c60391156a1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.742387] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58cc920f-b043-4468-b6c4-97376dd3f0e7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.748431] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f798f7e3-ec85-4be4-82fa-79b42f422e50 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.776396] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None 
None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181389MB free_disk=167GB free_vcpus=48 pci_devices=None {{(pid=62510) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1215.776517] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1215.776705] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1216.795078] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1216.795356] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1216.807983] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80dbcb4b-4edd-48b8-82b9-aa65fc55b245 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.815550] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f37db0f-cbdf-45ec-9a4e-67fbdf11d123 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.843974] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ebd1a7c-8e3c-46c6-9253-3a9608c0f4fb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.851323] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc44b85e-8789-4747-a4cd-e5f54718d688 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.864711] env[62510]: DEBUG nova.compute.provider_tree [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1217.367451] env[62510]: DEBUG nova.scheduler.client.report [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 167, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1217.368702] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62510) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1217.368899] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1217.369136] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1217.369273] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Cleaning up deleted instances {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11609}} [ 1217.872018] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] There are 0 instances to clean {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11618}} [ 1218.864534] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1218.864776] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1218.864919] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1227.810670] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._sync_power_states {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1228.313786] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Getting list of instances from cluster (obj){ [ 1228.313786] env[62510]: value = "domain-c8" [ 1228.313786] env[62510]: _type = "ClusterComputeResource" [ 1228.313786] env[62510]: } {{(pid=62510) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1228.314887] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5102c7d-b360-40f6-91f7-79c22e9f5050 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.323465] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Got total of 0 instances {{(pid=62510) list_instances 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1270.721902] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1274.207498] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1274.207892] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62510) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 1275.207502] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1275.207726] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1275.711476] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1275.711727] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1275.711912] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1275.712108] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62510) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1275.712968] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85bb62d5-8c54-4737-815d-041d8ae453f8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.721007] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de681efc-5666-4fb5-888a-4f34111a1092 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.734902] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-643ac230-cbee-49f3-910c-01ffaa5c6fbc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.740849] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22fecef5-d84a-4b20-ac01-aa4a6aa6490f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.770165] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181392MB free_disk=167GB free_vcpus=48 pci_devices=None {{(pid=62510) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1275.770273] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1275.770466] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1276.887086] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1276.887398] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1276.902568] env[62510]: DEBUG nova.scheduler.client.report [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Refreshing inventories for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1276.913802] env[62510]: DEBUG nova.scheduler.client.report [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Updating ProviderTree inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 167, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1276.913967] env[62510]: DEBUG nova.compute.provider_tree [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 
65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 167, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1276.923187] env[62510]: DEBUG nova.scheduler.client.report [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Refreshing aggregate associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, aggregates: None {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1276.937681] env[62510]: DEBUG nova.scheduler.client.report [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Refreshing trait associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1276.948979] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31d52ed7-a111-439c-ba22-ca01f6618683 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.956739] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e411e6a-2020-4dd1-8d09-e2367b180cb3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.985759] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b68e3f8-c8f0-4861-9108-b90fc2b066ad {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.992346] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeb828e0-1749-481a-bcfc-ff00e30c07a2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.005867] env[62510]: DEBUG nova.compute.provider_tree [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1277.509573] env[62510]: DEBUG nova.scheduler.client.report [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 167, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1277.511016] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62510) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1277.511233] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.741s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1278.512068] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1278.512068] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1278.512068] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Starting heal instance info cache {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 1278.512068] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Rebuilding the list of instances to heal {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10313}} [ 1279.015562] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Didn't find any instances for network info cache update. {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10395}} [ 1279.015847] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1279.015993] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1279.016147] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1330.209691] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1334.208125] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1334.208125] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62510) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 1336.202633] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1336.708083] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1337.210147] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1337.210573] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1337.210573] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1337.210726] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62510) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1337.211650] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96c45ea2-7349-4cb6-8038-f2cfa33eb3d3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.219945] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d50af5a-4861-4d9b-9178-00396a8e90de {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.233558] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02f73aa6-bd83-40e3-988f-9abbd758ed02 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.239519] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6aa4730-9f51-4a98-ab94-d60b6b231308 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.268721] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181393MB free_disk=167GB free_vcpus=48 pci_devices=None {{(pid=62510) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1337.268939] env[62510]: DEBUG 
oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1337.269063] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1338.287525] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1338.287772] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1338.301201] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44fabb71-5fef-406e-a96a-7edafbc38a58 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.308874] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba75cf79-6644-406b-8aa5-0dd21fa44dbb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.337973] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0917582-93b1-4ad6-a687-d1c5b906d082 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.345058] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-833dbf2b-4c7f-44ab-a332-babb823ad913 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.357879] env[62510]: DEBUG nova.compute.provider_tree [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1338.861308] env[62510]: DEBUG nova.scheduler.client.report [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 167, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1338.862538] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Compute_service record updated for 
cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62510) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1338.862729] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.594s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1339.363418] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1339.363872] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1339.363872] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Starting heal instance info cache {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 1339.363992] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Rebuilding the list of instances to heal {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10313}} [ 1339.867443] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Didn't find any instances for network info cache update. {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10395}} [ 1339.867682] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1339.867845] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1339.868000] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1340.207562] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1381.094194] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Acquiring lock "0604d37b-38c5-4510-894e-b26fd44e17c5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1381.094822] env[62510]: DEBUG 
oslo_concurrency.lockutils [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Lock "0604d37b-38c5-4510-894e-b26fd44e17c5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1381.599425] env[62510]: DEBUG nova.compute.manager [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1382.165564] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1382.165856] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1382.170132] env[62510]: INFO nova.compute.claims [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1382.887157] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "5588650b-c450-489a-a456-3b580a5b9114" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1382.887598] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "5588650b-c450-489a-a456-3b580a5b9114" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1383.271565] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fecb3ebc-6067-4656-a4af-d9ce93eb9e43 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.288177] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-494053d9-19dd-4e19-b8ba-7e83f705e41a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.327334] env[62510]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b05b3ff-012d-4c6b-a7a7-d50e719b977a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.339331] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00dcc3bb-dd5f-4adf-a3f1-5855bee85391 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.354032] env[62510]: DEBUG nova.compute.provider_tree [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1383.389812] env[62510]: DEBUG nova.compute.manager [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1383.756142] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquiring lock "12768001-6ed0-47be-8f20-c59ee82b842a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1383.756378] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Lock "12768001-6ed0-47be-8f20-c59ee82b842a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1383.857642] env[62510]: DEBUG nova.scheduler.client.report [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 167, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1383.923493] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1384.259321] env[62510]: DEBUG nova.compute.manager [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 
tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1384.363320] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.197s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1384.363985] env[62510]: DEBUG nova.compute.manager [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1384.368481] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.445s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1384.370260] env[62510]: INFO nova.compute.claims [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1384.788475] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1384.874103] env[62510]: DEBUG nova.compute.utils [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1384.878440] env[62510]: DEBUG nova.compute.manager [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1384.879285] env[62510]: DEBUG nova.network.neutron [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1385.170029] env[62510]: DEBUG nova.policy [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '87bcaadb9e9c4f0b87e85f1e1ce537db', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '176b26f90d5441208c6157567aaf19fe', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1385.293729] env[62510]: DEBUG oslo_concurrency.lockutils [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Acquiring lock "3266d254-4a75-4fd3-b4e7-ebeb86467cbe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1385.293958] env[62510]: DEBUG oslo_concurrency.lockutils [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Lock "3266d254-4a75-4fd3-b4e7-ebeb86467cbe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1385.394790] env[62510]: DEBUG nova.compute.manager [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Start building block device mappings for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1385.469148] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Acquiring lock "eb840df4-edc1-44cb-84c9-f31b7b56b6bd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1385.469374] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Lock "eb840df4-edc1-44cb-84c9-f31b7b56b6bd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1385.511995] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Acquiring lock "731e7110-9709-4c4e-96d2-00e21e67c6e3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1385.511995] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Lock "731e7110-9709-4c4e-96d2-00e21e67c6e3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1385.536523] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0edca86-7466-4113-aa79-59d8eb15662e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.544258] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-864a65a6-b91d-44a8-b7ee-b43c82e7b456 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.575935] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27e6b92d-941b-4606-af9f-6065ef23d644 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.585386] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab9eff08-ae96-49c5-a28f-3e6613892c62 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.599187] env[62510]: DEBUG nova.compute.provider_tree [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1385.654729] env[62510]: DEBUG nova.network.neutron [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 
tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Successfully created port: b789c474-95af-4b6c-930a-2ce797a579f6 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1385.796579] env[62510]: DEBUG nova.compute.manager [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1385.973140] env[62510]: DEBUG nova.compute.manager [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1386.017021] env[62510]: DEBUG nova.compute.manager [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1386.104553] env[62510]: DEBUG nova.scheduler.client.report [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 167, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1386.248597] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Acquiring lock "585784c5-b56a-435d-8b22-53bc5cb39b25" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1386.248597] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Lock "585784c5-b56a-435d-8b22-53bc5cb39b25" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1386.327930] env[62510]: DEBUG oslo_concurrency.lockutils [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1386.340845] env[62510]: DEBUG oslo_concurrency.lockutils 
[None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Acquiring lock "c7d875ee-2b9c-48e4-9bf9-f7602e75ec62" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1386.341354] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Lock "c7d875ee-2b9c-48e4-9bf9-f7602e75ec62" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1386.409816] env[62510]: DEBUG nova.compute.manager [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1386.443654] env[62510]: DEBUG nova.virt.hardware [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1386.444485] env[62510]: DEBUG nova.virt.hardware [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1386.444485] env[62510]: DEBUG nova.virt.hardware [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1386.444485] env[62510]: DEBUG nova.virt.hardware [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1386.444485] env[62510]: DEBUG nova.virt.hardware [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Image pref 0:0:0 
{{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1386.446218] env[62510]: DEBUG nova.virt.hardware [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1386.446218] env[62510]: DEBUG nova.virt.hardware [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1386.446218] env[62510]: DEBUG nova.virt.hardware [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1386.446218] env[62510]: DEBUG nova.virt.hardware [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1386.446218] env[62510]: DEBUG nova.virt.hardware [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1386.446461] env[62510]: DEBUG nova.virt.hardware [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1386.446461] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53b25ca5-74c1-4a41-8936-6a94529f516b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.457663] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f18c8a54-9c1f-48d8-a370-f99aefa7428e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.478821] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ddec1ee-abd9-4f89-a08e-7aa2d6698d5a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.528334] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1386.541614] env[62510]: DEBUG 
oslo_concurrency.lockutils [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1386.609247] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.241s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1386.609981] env[62510]: DEBUG nova.compute.manager [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1386.614192] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.825s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1386.614645] env[62510]: INFO nova.compute.claims [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1386.749574] env[62510]: DEBUG nova.compute.manager [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1386.847890] env[62510]: DEBUG nova.compute.manager [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1387.120313] env[62510]: DEBUG nova.compute.utils [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1387.124503] env[62510]: DEBUG nova.compute.manager [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1387.124503] env[62510]: DEBUG nova.network.neutron [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1387.207298] env[62510]: DEBUG nova.policy [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a406a2bf0ccd4b99ba7dcb359a9b640e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e144c0bd2d124193a65ad53de8c43039', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1387.276491] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1387.368495] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1387.627133] env[62510]: DEBUG nova.compute.manager [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Start building block device mappings for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1387.775539] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-019b9504-65d1-4126-a3ba-661db790a65c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.784722] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2411ab32-4bf9-45d1-82b7-ca9ffd00b8c5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.821175] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afdc5885-40bc-4946-8e05-8b82565a0521 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.833408] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85db84a5-b4f0-4198-be5b-d3067e8cbe80 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.851963] env[62510]: DEBUG nova.compute.provider_tree [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1387.971217] env[62510]: DEBUG nova.network.neutron [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Successfully created port: d87267d7-ec03-4d4a-a31a-9cb46a459d3c {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1388.084130] env[62510]: DEBUG nova.network.neutron [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Successfully updated port: b789c474-95af-4b6c-930a-2ce797a579f6 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1388.357269] env[62510]: DEBUG nova.scheduler.client.report [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 167, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1388.540871] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Acquiring lock "612e95d6-28ef-4c9a-b5d9-fd83122bfa44" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1388.541144] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Lock "612e95d6-28ef-4c9a-b5d9-fd83122bfa44" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1388.565387] env[62510]: DEBUG oslo_concurrency.lockutils [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Acquiring lock "75e06a24-b96c-4a42-bc2d-b0b960e3301a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1388.565387] env[62510]: DEBUG oslo_concurrency.lockutils [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Lock "75e06a24-b96c-4a42-bc2d-b0b960e3301a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1388.589084] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Acquiring lock "refresh_cache-0604d37b-38c5-4510-894e-b26fd44e17c5" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1388.589243] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Acquired lock "refresh_cache-0604d37b-38c5-4510-894e-b26fd44e17c5" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1388.589400] env[62510]: DEBUG nova.network.neutron [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1388.640817] env[62510]: DEBUG nova.compute.manager [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1388.671644] env[62510]: DEBUG nova.virt.hardware [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1388.672416] env[62510]: DEBUG nova.virt.hardware [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1388.672416] env[62510]: DEBUG nova.virt.hardware [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1388.672416] env[62510]: DEBUG nova.virt.hardware [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1388.672416] env[62510]: DEBUG nova.virt.hardware [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1388.673158] env[62510]: DEBUG nova.virt.hardware [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1388.673158] env[62510]: DEBUG nova.virt.hardware [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1388.673158] env[62510]: DEBUG nova.virt.hardware [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1388.673158] env[62510]: DEBUG 
nova.virt.hardware [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1388.673271] env[62510]: DEBUG nova.virt.hardware [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1388.673353] env[62510]: DEBUG nova.virt.hardware [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1388.674291] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac6fefed-628d-4a55-b5bf-3a525f6cf13f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.682438] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d427141f-83e6-4f03-b34c-669dc70e9f49 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.862133] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.249s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1388.862637] env[62510]: DEBUG nova.compute.manager [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1388.865960] env[62510]: DEBUG oslo_concurrency.lockutils [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.538s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1388.867328] env[62510]: INFO nova.compute.claims [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1389.043510] env[62510]: DEBUG nova.compute.manager [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1389.069170] env[62510]: DEBUG nova.compute.manager [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1389.161295] env[62510]: DEBUG nova.network.neutron [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1389.163848] env[62510]: DEBUG oslo_concurrency.lockutils [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "8ffa27e9-6a3b-48d1-aed4-c808089788d9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1389.163848] env[62510]: DEBUG oslo_concurrency.lockutils [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "8ffa27e9-6a3b-48d1-aed4-c808089788d9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1389.339616] env[62510]: DEBUG nova.compute.manager [req-93434714-87e1-43a5-9d79-99c451eeda7a req-bda77d0e-eb26-4c8f-b2e9-a201a9344335 service nova] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Received event network-vif-plugged-b789c474-95af-4b6c-930a-2ce797a579f6 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1389.339616] env[62510]: DEBUG oslo_concurrency.lockutils [req-93434714-87e1-43a5-9d79-99c451eeda7a req-bda77d0e-eb26-4c8f-b2e9-a201a9344335 service nova] Acquiring lock "0604d37b-38c5-4510-894e-b26fd44e17c5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1389.339758] env[62510]: DEBUG oslo_concurrency.lockutils [req-93434714-87e1-43a5-9d79-99c451eeda7a req-bda77d0e-eb26-4c8f-b2e9-a201a9344335 service nova] Lock "0604d37b-38c5-4510-894e-b26fd44e17c5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1389.340378] env[62510]: DEBUG oslo_concurrency.lockutils [req-93434714-87e1-43a5-9d79-99c451eeda7a req-bda77d0e-eb26-4c8f-b2e9-a201a9344335 service nova] Lock "0604d37b-38c5-4510-894e-b26fd44e17c5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1389.340790] env[62510]: DEBUG nova.compute.manager [req-93434714-87e1-43a5-9d79-99c451eeda7a req-bda77d0e-eb26-4c8f-b2e9-a201a9344335 service nova] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] No waiting events found 
dispatching network-vif-plugged-b789c474-95af-4b6c-930a-2ce797a579f6 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1389.340943] env[62510]: WARNING nova.compute.manager [req-93434714-87e1-43a5-9d79-99c451eeda7a req-bda77d0e-eb26-4c8f-b2e9-a201a9344335 service nova] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Received unexpected event network-vif-plugged-b789c474-95af-4b6c-930a-2ce797a579f6 for instance with vm_state building and task_state spawning. [ 1389.374464] env[62510]: DEBUG nova.compute.utils [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1389.376226] env[62510]: DEBUG nova.compute.manager [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1389.376411] env[62510]: DEBUG nova.network.neutron [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1389.573778] env[62510]: DEBUG nova.policy [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ec8f49592421487c89b77efc86542f3e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c74083aa7b4a4db5b9b6d6248beb3ff3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1389.581553] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1389.599077] env[62510]: DEBUG oslo_concurrency.lockutils [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1389.611692] env[62510]: DEBUG nova.network.neutron [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Updating instance_info_cache with network_info: [{"id": "b789c474-95af-4b6c-930a-2ce797a579f6", "address": "fa:16:3e:35:db:d1", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": 
"shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.83", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb789c474-95", "ovs_interfaceid": "b789c474-95af-4b6c-930a-2ce797a579f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1389.882091] env[62510]: DEBUG nova.compute.manager [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1390.010546] env[62510]: DEBUG nova.network.neutron [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Successfully updated port: d87267d7-ec03-4d4a-a31a-9cb46a459d3c {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1390.111107] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1a2769b-30e5-4936-9958-559773dd554d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.114384] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Releasing lock "refresh_cache-0604d37b-38c5-4510-894e-b26fd44e17c5" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1390.114739] env[62510]: DEBUG nova.compute.manager [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Instance network_info: |[{"id": "b789c474-95af-4b6c-930a-2ce797a579f6", "address": "fa:16:3e:35:db:d1", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.83", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb789c474-95", "ovs_interfaceid": "b789c474-95af-4b6c-930a-2ce797a579f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1390.118402] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:35:db:d1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '28d04eee-6dbb-491a-a999-b659c799679d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b789c474-95af-4b6c-930a-2ce797a579f6', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1390.132169] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1390.132312] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8b7a2df6-d022-42a5-a2d1-e9d35b91f194 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.135933] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb94edc4-5dd4-4446-ae87-1fe949117018 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.151880] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Created folder: OpenStack in parent group-v4. [ 1390.152105] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Creating folder: Project (176b26f90d5441208c6157567aaf19fe). Parent ref: group-v367197. 
{{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1390.178840] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5617562e-a067-4f3f-ac83-1a5d7ea0f413 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.181783] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-566bbe20-32c1-47ce-99ed-4aa091327e19 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.189974] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0e22381-8859-48bf-9a15-6b9643a1b713 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.196173] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Created folder: Project (176b26f90d5441208c6157567aaf19fe) in parent group-v367197. [ 1390.196173] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Creating folder: Instances. Parent ref: group-v367198. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1390.196600] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e5ff077b-022d-44c1-91eb-0857666d90b3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.208673] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1390.213245] env[62510]: DEBUG nova.compute.provider_tree [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1390.224011] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Created folder: Instances in parent group-v367198. [ 1390.224011] env[62510]: DEBUG oslo.service.loopingcall [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1390.224324] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1390.225886] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a95ad11d-335f-45f4-a554-4fc1861ab5db {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.248367] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1390.248367] env[62510]: value = "task-1768165" [ 1390.248367] env[62510]: _type = "Task" [ 1390.248367] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.259980] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768165, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.517747] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "refresh_cache-5588650b-c450-489a-a456-3b580a5b9114" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1390.518200] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquired lock "refresh_cache-5588650b-c450-489a-a456-3b580a5b9114" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1390.518646] env[62510]: DEBUG nova.network.neutron [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1390.600846] env[62510]: DEBUG oslo_concurrency.lockutils [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Acquiring lock "35a98028-0fc6-4e13-b50d-5dacf205dbe5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1390.602241] env[62510]: DEBUG oslo_concurrency.lockutils [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Lock "35a98028-0fc6-4e13-b50d-5dacf205dbe5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1390.644785] env[62510]: DEBUG nova.network.neutron [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Successfully created port: 7489ebb6-ec5f-4097-9a62-81a2d3dedd52 {{(pid=62510) 
_create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1390.717261] env[62510]: DEBUG nova.scheduler.client.report [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 167, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1390.739362] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "4e735bb6-f167-4c2b-b44e-d2dd3040603d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1390.739538] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "4e735bb6-f167-4c2b-b44e-d2dd3040603d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1390.760575] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768165, 'name': CreateVM_Task, 'duration_secs': 0.385187} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.760825] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1390.772173] env[62510]: DEBUG oslo_vmware.service [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4480f9ad-d8b1-457e-af34-ec467466bd2f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.780198] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1390.780379] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1390.781098] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1390.781355] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7873ab9a-0a39-411b-8a32-678e1a9a9e7c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.786408] env[62510]: DEBUG oslo_vmware.api [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Waiting for the task: (returnval){ [ 1390.786408] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52cced09-d857-d0d9-f4f6-e9625d5918a3" [ 1390.786408] env[62510]: _type = "Task" [ 1390.786408] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.796638] env[62510]: DEBUG oslo_vmware.api [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52cced09-d857-d0d9-f4f6-e9625d5918a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.893758] env[62510]: DEBUG nova.compute.manager [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1390.921156] env[62510]: DEBUG nova.virt.hardware [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1390.921156] env[62510]: DEBUG nova.virt.hardware [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1390.921368] env[62510]: DEBUG nova.virt.hardware [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1390.921401] env[62510]: DEBUG nova.virt.hardware [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1390.921574] env[62510]: DEBUG nova.virt.hardware [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1390.921692] env[62510]: DEBUG nova.virt.hardware [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1390.921900] env[62510]: DEBUG nova.virt.hardware [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1390.922074] env[62510]: DEBUG nova.virt.hardware [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1390.922952] env[62510]: DEBUG nova.virt.hardware [None req-5af6f516-e584-4b94-8129-abe0b583e646 
tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1390.922952] env[62510]: DEBUG nova.virt.hardware [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1390.922952] env[62510]: DEBUG nova.virt.hardware [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1390.925520] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2096ab15-6928-417e-b98d-b05fd56776e0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.933699] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df603319-aaed-4aea-87d8-9598a8872e64 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.139465] env[62510]: DEBUG nova.network.neutron [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1391.228592] env[62510]: DEBUG oslo_concurrency.lockutils [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.362s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1391.229323] env[62510]: DEBUG nova.compute.manager [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1391.233296] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.706s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1391.235562] env[62510]: INFO nova.compute.claims [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1391.300937] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1391.300937] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1391.302404] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1391.302710] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1391.303270] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1391.303676] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-45f4c35e-450a-428e-9ce3-0d5edda72baa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.326347] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1391.326715] env[62510]: DEBUG 
nova.virt.vmwareapi.vmops [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1391.328443] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efb63cfe-c37f-4f16-b1e7-be727ac603eb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.340271] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3178d9b1-af32-40fd-8b95-5807c4306e78 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.353445] env[62510]: DEBUG oslo_vmware.api [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Waiting for the task: (returnval){ [ 1391.353445] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5210a1a3-cbe4-99de-2722-e07143b5e4fc" [ 1391.353445] env[62510]: _type = "Task" [ 1391.353445] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.371071] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Preparing fetch location {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1391.372017] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Creating directory with path [datastore1] vmware_temp/abdf034c-3ae7-48ad-bd63-3f0611f511d7/645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1391.372692] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3a885ce4-7147-4572-ae78-d0b0ab7052dd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.400497] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Created directory with path [datastore1] vmware_temp/abdf034c-3ae7-48ad-bd63-3f0611f511d7/645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1391.400719] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Fetch image to [datastore1] vmware_temp/abdf034c-3ae7-48ad-bd63-3f0611f511d7/645af513-c243-4722-b631-714f21477ae6/tmp-sparse.vmdk {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1391.400897] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 
tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Downloading image file data 645af513-c243-4722-b631-714f21477ae6 to [datastore1] vmware_temp/abdf034c-3ae7-48ad-bd63-3f0611f511d7/645af513-c243-4722-b631-714f21477ae6/tmp-sparse.vmdk on the data store datastore1 {{(pid=62510) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1391.401870] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f27b0cba-acaa-40c6-9780-d4c49918d7ae {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.413756] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc1c67d0-87e3-48fc-836b-c0b7225859b0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.418805] env[62510]: DEBUG nova.network.neutron [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Updating instance_info_cache with network_info: [{"id": "d87267d7-ec03-4d4a-a31a-9cb46a459d3c", "address": "fa:16:3e:eb:1f:d2", "network": {"id": "bf59f5d9-5154-4120-9edd-03529b552382", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2003015829-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e144c0bd2d124193a65ad53de8c43039", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd87267d7-ec", "ovs_interfaceid": "d87267d7-ec03-4d4a-a31a-9cb46a459d3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1391.428092] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22b0e458-ad6f-43c8-83f2-1ceab4cf2ded {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.468401] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8c5e313-6f73-4ccc-806e-10850bd75089 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.475160] env[62510]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-897e2726-569a-4872-9d5a-200c0d5ee67e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.503647] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 
0604d37b-38c5-4510-894e-b26fd44e17c5] Downloading image file data 645af513-c243-4722-b631-714f21477ae6 to the data store datastore1 {{(pid=62510) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1391.569901] env[62510]: DEBUG oslo_vmware.rw_handles [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/abdf034c-3ae7-48ad-bd63-3f0611f511d7/645af513-c243-4722-b631-714f21477ae6/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62510) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1391.701452] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Acquiring lock "34a464e2-d38e-4c24-a487-c62a4f484667" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1391.701756] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Lock "34a464e2-d38e-4c24-a487-c62a4f484667" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1391.740585] env[62510]: DEBUG nova.compute.utils [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1391.748137] env[62510]: DEBUG nova.compute.manager [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1391.748137] env[62510]: DEBUG nova.network.neutron [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1391.812608] env[62510]: DEBUG nova.policy [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '23b651d8ee3c41d8bba2cb1366c68b85', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '97e5d626c8c04377b1653057d6fb63e8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1391.921642] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Releasing lock "refresh_cache-5588650b-c450-489a-a456-3b580a5b9114" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1391.922438] env[62510]: DEBUG nova.compute.manager [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Instance network_info: |[{"id": "d87267d7-ec03-4d4a-a31a-9cb46a459d3c", "address": "fa:16:3e:eb:1f:d2", "network": {"id": "bf59f5d9-5154-4120-9edd-03529b552382", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2003015829-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "e144c0bd2d124193a65ad53de8c43039", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd87267d7-ec", "ovs_interfaceid": "d87267d7-ec03-4d4a-a31a-9cb46a459d3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1391.923190] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:eb:1f:d2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '87bbf4e0-9064-4516-b7e7-44973f817205', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': 'd87267d7-ec03-4d4a-a31a-9cb46a459d3c', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1391.935964] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Creating folder: Project (e144c0bd2d124193a65ad53de8c43039). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1391.942421] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c4b8fc94-a391-48b8-8b57-01920ec5747b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.959401] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Created folder: Project (e144c0bd2d124193a65ad53de8c43039) in parent group-v367197. [ 1391.959607] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Creating folder: Instances. Parent ref: group-v367201. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1391.959845] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dbc80b0d-591b-4da0-a177-08205a69da5d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.970455] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Created folder: Instances in parent group-v367201. [ 1391.970825] env[62510]: DEBUG oslo.service.loopingcall [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1391.970934] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1391.971157] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-16a7807c-7058-4faa-9c7b-9d5efed685a6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.020609] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1392.020609] env[62510]: value = "task-1768168" [ 1392.020609] env[62510]: _type = "Task" [ 1392.020609] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.032282] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768168, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.246496] env[62510]: DEBUG nova.compute.manager [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1392.328590] env[62510]: DEBUG oslo_vmware.rw_handles [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Completed reading data from the image iterator. {{(pid=62510) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1392.331813] env[62510]: DEBUG oslo_vmware.rw_handles [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/abdf034c-3ae7-48ad-bd63-3f0611f511d7/645af513-c243-4722-b631-714f21477ae6/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62510) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1392.395708] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Downloaded image file data 645af513-c243-4722-b631-714f21477ae6 to vmware_temp/abdf034c-3ae7-48ad-bd63-3f0611f511d7/645af513-c243-4722-b631-714f21477ae6/tmp-sparse.vmdk on the data store datastore1 {{(pid=62510) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1392.397687] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Caching image {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1392.397805] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Copying Virtual Disk [datastore1] vmware_temp/abdf034c-3ae7-48ad-bd63-3f0611f511d7/645af513-c243-4722-b631-714f21477ae6/tmp-sparse.vmdk to [datastore1] vmware_temp/abdf034c-3ae7-48ad-bd63-3f0611f511d7/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1392.399561] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d7f7137f-d581-46cd-8a57-37180ab7bc45 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.410904] env[62510]: DEBUG oslo_vmware.api [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Waiting for the task: (returnval){ [ 1392.410904] env[62510]: value = "task-1768169" [ 1392.410904] env[62510]: _type = "Task" [ 1392.410904] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.425198] env[62510]: DEBUG oslo_vmware.api [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': task-1768169, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.450734] env[62510]: DEBUG nova.network.neutron [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Successfully created port: 55c9d2e9-7fb5-4e9d-8071-fe23efeedb7c {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1392.537737] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768168, 'name': CreateVM_Task, 'duration_secs': 0.395232} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.538076] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1392.539932] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1392.539932] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1392.540521] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1392.540521] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ab279a2-e507-4a89-9c97-ea57357a7c71 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.552371] env[62510]: DEBUG oslo_vmware.api [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1392.552371] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52352995-f9a4-a90a-6674-d43110d78e06" [ 1392.552371] env[62510]: _type = "Task" [ 1392.552371] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.563181] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1392.563638] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1392.564169] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1392.565459] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e5e37cb-f3df-4bc1-bc26-937e7fdafc81 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.575079] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba34736b-ed2c-4764-94b1-b4751de67923 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.611913] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e7b6ab6-43f0-4a24-96fd-9539f73db5df {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.620447] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6e6a848-ba1d-4e25-bf6a-4f0ea4b407a2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.641794] env[62510]: DEBUG nova.compute.provider_tree [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1392.924819] env[62510]: DEBUG oslo_vmware.api [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': task-1768169, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.030797] env[62510]: DEBUG nova.compute.manager [req-5ee2906c-2497-496e-9de9-0a7031907bc3 req-ad7dbbdc-2029-454d-9c9a-51ca01462e77 service nova] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Received event network-changed-b789c474-95af-4b6c-930a-2ce797a579f6 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1393.031015] env[62510]: DEBUG nova.compute.manager [req-5ee2906c-2497-496e-9de9-0a7031907bc3 req-ad7dbbdc-2029-454d-9c9a-51ca01462e77 service nova] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Refreshing instance network info cache due to event network-changed-b789c474-95af-4b6c-930a-2ce797a579f6. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1393.031545] env[62510]: DEBUG oslo_concurrency.lockutils [req-5ee2906c-2497-496e-9de9-0a7031907bc3 req-ad7dbbdc-2029-454d-9c9a-51ca01462e77 service nova] Acquiring lock "refresh_cache-0604d37b-38c5-4510-894e-b26fd44e17c5" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1393.031545] env[62510]: DEBUG oslo_concurrency.lockutils [req-5ee2906c-2497-496e-9de9-0a7031907bc3 req-ad7dbbdc-2029-454d-9c9a-51ca01462e77 service nova] Acquired lock "refresh_cache-0604d37b-38c5-4510-894e-b26fd44e17c5" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1393.032052] env[62510]: DEBUG nova.network.neutron [req-5ee2906c-2497-496e-9de9-0a7031907bc3 req-ad7dbbdc-2029-454d-9c9a-51ca01462e77 service nova] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Refreshing network info cache for port b789c474-95af-4b6c-930a-2ce797a579f6 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1393.144114] env[62510]: DEBUG nova.scheduler.client.report [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 167, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1393.268279] env[62510]: DEBUG nova.compute.manager [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1393.299560] env[62510]: DEBUG nova.virt.hardware [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1393.299837] env[62510]: DEBUG nova.virt.hardware [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1393.300202] env[62510]: DEBUG nova.virt.hardware [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1393.300579] env[62510]: DEBUG nova.virt.hardware [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1393.301055] env[62510]: DEBUG nova.virt.hardware [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1393.301508] env[62510]: DEBUG nova.virt.hardware [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1393.301795] env[62510]: DEBUG nova.virt.hardware [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1393.302047] env[62510]: DEBUG nova.virt.hardware [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1393.302298] env[62510]: DEBUG 
nova.virt.hardware [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1393.302496] env[62510]: DEBUG nova.virt.hardware [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1393.302676] env[62510]: DEBUG nova.virt.hardware [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1393.304315] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf74b39c-66c2-4b6b-b822-da860e605766 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.313319] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bcdeab8-7544-4dfc-b894-2b644af45115 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.422856] env[62510]: DEBUG oslo_vmware.api [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': task-1768169, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.731906} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.423159] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Copied Virtual Disk [datastore1] vmware_temp/abdf034c-3ae7-48ad-bd63-3f0611f511d7/645af513-c243-4722-b631-714f21477ae6/tmp-sparse.vmdk to [datastore1] vmware_temp/abdf034c-3ae7-48ad-bd63-3f0611f511d7/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1393.423347] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Deleting the datastore file [datastore1] vmware_temp/abdf034c-3ae7-48ad-bd63-3f0611f511d7/645af513-c243-4722-b631-714f21477ae6/tmp-sparse.vmdk {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1393.423867] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e3a2f59d-2030-41c9-940e-6667779fa572 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.433488] env[62510]: DEBUG oslo_vmware.api [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Waiting for the task: (returnval){ [ 1393.433488] env[62510]: value = "task-1768170" [ 1393.433488] env[62510]: _type = "Task" [ 1393.433488] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.444354] env[62510]: DEBUG oslo_vmware.api [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': task-1768170, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.622599] env[62510]: DEBUG nova.network.neutron [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Successfully updated port: 7489ebb6-ec5f-4097-9a62-81a2d3dedd52 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1393.662271] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.425s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1393.662271] env[62510]: DEBUG nova.compute.manager [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1393.663907] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.123s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1393.666143] env[62510]: INFO nova.compute.claims [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1393.948192] env[62510]: DEBUG oslo_vmware.api [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': task-1768170, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.022637} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.948192] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1393.948513] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Moving file from [datastore1] vmware_temp/abdf034c-3ae7-48ad-bd63-3f0611f511d7/645af513-c243-4722-b631-714f21477ae6 to [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6. {{(pid=62510) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 1393.948558] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-9e8bb848-6925-4d43-b355-1a9f38106be6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.960279] env[62510]: DEBUG oslo_vmware.api [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Waiting for the task: (returnval){ [ 1393.960279] env[62510]: value = "task-1768171" [ 1393.960279] env[62510]: _type = "Task" [ 1393.960279] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.970661] env[62510]: DEBUG oslo_vmware.api [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': task-1768171, 'name': MoveDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.131680] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquiring lock "refresh_cache-12768001-6ed0-47be-8f20-c59ee82b842a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1394.131964] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquired lock "refresh_cache-12768001-6ed0-47be-8f20-c59ee82b842a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1394.132183] env[62510]: DEBUG nova.network.neutron [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1394.177088] env[62510]: DEBUG nova.compute.utils [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1394.178769] env[62510]: DEBUG nova.compute.manager [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1394.178769] env[62510]: DEBUG nova.network.neutron [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1394.280063] env[62510]: DEBUG nova.policy [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c2897473b1e44271b67789290b3477c8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd33701c4eedd47268e1c8d16bd63de81', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1394.423272] env[62510]: DEBUG oslo_concurrency.lockutils [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Acquiring lock "7490c825-dfd5-409c-9fd6-0e78643338fb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1394.423586] env[62510]: DEBUG oslo_concurrency.lockutils [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Lock "7490c825-dfd5-409c-9fd6-0e78643338fb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1394.477162] env[62510]: DEBUG oslo_vmware.api [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': task-1768171, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.027306} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.477478] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] File moved {{(pid=62510) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 1394.477778] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Cleaning up location [datastore1] vmware_temp/abdf034c-3ae7-48ad-bd63-3f0611f511d7 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1394.477844] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Deleting the datastore file [datastore1] vmware_temp/abdf034c-3ae7-48ad-bd63-3f0611f511d7 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1394.478139] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c9d5d25a-88eb-4302-bb0b-fc6005d22eb0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.485069] env[62510]: DEBUG oslo_vmware.api [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Waiting for the task: (returnval){ [ 1394.485069] env[62510]: value = "task-1768172" [ 1394.485069] env[62510]: _type = "Task" [ 1394.485069] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.495774] env[62510]: DEBUG oslo_vmware.api [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': task-1768172, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.609098] env[62510]: DEBUG nova.compute.manager [req-2d060a0d-ac02-400c-b449-370ed744d0bf req-8c09ab7c-12a9-42ca-b56e-3b77a8a2c578 service nova] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Received event network-vif-plugged-7489ebb6-ec5f-4097-9a62-81a2d3dedd52 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1394.609098] env[62510]: DEBUG oslo_concurrency.lockutils [req-2d060a0d-ac02-400c-b449-370ed744d0bf req-8c09ab7c-12a9-42ca-b56e-3b77a8a2c578 service nova] Acquiring lock "12768001-6ed0-47be-8f20-c59ee82b842a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1394.609098] env[62510]: DEBUG oslo_concurrency.lockutils [req-2d060a0d-ac02-400c-b449-370ed744d0bf req-8c09ab7c-12a9-42ca-b56e-3b77a8a2c578 service nova] Lock "12768001-6ed0-47be-8f20-c59ee82b842a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1394.609098] env[62510]: DEBUG oslo_concurrency.lockutils [req-2d060a0d-ac02-400c-b449-370ed744d0bf req-8c09ab7c-12a9-42ca-b56e-3b77a8a2c578 service nova] Lock "12768001-6ed0-47be-8f20-c59ee82b842a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1394.609098] env[62510]: DEBUG nova.compute.manager [req-2d060a0d-ac02-400c-b449-370ed744d0bf req-8c09ab7c-12a9-42ca-b56e-3b77a8a2c578 service nova] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] No waiting events found dispatching network-vif-plugged-7489ebb6-ec5f-4097-9a62-81a2d3dedd52 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1394.609454] env[62510]: WARNING nova.compute.manager [req-2d060a0d-ac02-400c-b449-370ed744d0bf req-8c09ab7c-12a9-42ca-b56e-3b77a8a2c578 service nova] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Received unexpected event network-vif-plugged-7489ebb6-ec5f-4097-9a62-81a2d3dedd52 for instance with vm_state building and task_state spawning. [ 1394.686303] env[62510]: DEBUG nova.compute.manager [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1394.694184] env[62510]: DEBUG nova.network.neutron [req-5ee2906c-2497-496e-9de9-0a7031907bc3 req-ad7dbbdc-2029-454d-9c9a-51ca01462e77 service nova] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Updated VIF entry in instance network info cache for port b789c474-95af-4b6c-930a-2ce797a579f6. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1394.694567] env[62510]: DEBUG nova.network.neutron [req-5ee2906c-2497-496e-9de9-0a7031907bc3 req-ad7dbbdc-2029-454d-9c9a-51ca01462e77 service nova] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Updating instance_info_cache with network_info: [{"id": "b789c474-95af-4b6c-930a-2ce797a579f6", "address": "fa:16:3e:35:db:d1", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.83", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb789c474-95", "ovs_interfaceid": "b789c474-95af-4b6c-930a-2ce797a579f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1394.724896] env[62510]: DEBUG nova.network.neutron [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Instance cache missing network info. 
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1394.935161] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4e5f4a2-d521-47b4-8811-edb313db0dda {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.949498] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06c86ee1-401a-47e4-94b1-3332068c34ea {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.987682] env[62510]: DEBUG nova.network.neutron [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Successfully created port: ca5eb991-9338-4e3a-8dcc-322896c420df {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1394.993386] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77d5367a-6fea-4c8b-b897-74328ab3bbc3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.005742] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b87256d-d03c-4546-8b60-e6c909fcbec4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.010254] env[62510]: DEBUG oslo_vmware.api [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': task-1768172, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.025874} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.011060] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1395.011664] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79c3bd59-a872-428c-824c-f4d29a25ee2f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.023036] env[62510]: DEBUG nova.compute.provider_tree [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1395.031237] env[62510]: DEBUG oslo_vmware.api [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Waiting for the task: (returnval){ [ 1395.031237] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52204a93-c7b7-2c2f-5458-0d3867580dc8" [ 1395.031237] env[62510]: _type = "Task" [ 1395.031237] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.040563] env[62510]: DEBUG oslo_vmware.api [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52204a93-c7b7-2c2f-5458-0d3867580dc8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.197336] env[62510]: DEBUG oslo_concurrency.lockutils [req-5ee2906c-2497-496e-9de9-0a7031907bc3 req-ad7dbbdc-2029-454d-9c9a-51ca01462e77 service nova] Releasing lock "refresh_cache-0604d37b-38c5-4510-894e-b26fd44e17c5" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1395.197532] env[62510]: DEBUG nova.compute.manager [req-5ee2906c-2497-496e-9de9-0a7031907bc3 req-ad7dbbdc-2029-454d-9c9a-51ca01462e77 service nova] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Received event network-vif-plugged-d87267d7-ec03-4d4a-a31a-9cb46a459d3c {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1395.200699] env[62510]: DEBUG oslo_concurrency.lockutils [req-5ee2906c-2497-496e-9de9-0a7031907bc3 req-ad7dbbdc-2029-454d-9c9a-51ca01462e77 service nova] Acquiring lock "5588650b-c450-489a-a456-3b580a5b9114-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1395.200699] env[62510]: DEBUG oslo_concurrency.lockutils [req-5ee2906c-2497-496e-9de9-0a7031907bc3 req-ad7dbbdc-2029-454d-9c9a-51ca01462e77 service nova] Lock "5588650b-c450-489a-a456-3b580a5b9114-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1395.200699] env[62510]: DEBUG oslo_concurrency.lockutils [req-5ee2906c-2497-496e-9de9-0a7031907bc3 req-ad7dbbdc-2029-454d-9c9a-51ca01462e77 service nova] Lock "5588650b-c450-489a-a456-3b580a5b9114-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1395.200699] env[62510]: DEBUG nova.compute.manager [req-5ee2906c-2497-496e-9de9-0a7031907bc3 req-ad7dbbdc-2029-454d-9c9a-51ca01462e77 service nova] [instance: 5588650b-c450-489a-a456-3b580a5b9114] No waiting events found dispatching network-vif-plugged-d87267d7-ec03-4d4a-a31a-9cb46a459d3c {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1395.200699] env[62510]: WARNING nova.compute.manager [req-5ee2906c-2497-496e-9de9-0a7031907bc3 req-ad7dbbdc-2029-454d-9c9a-51ca01462e77 service nova] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Received unexpected event network-vif-plugged-d87267d7-ec03-4d4a-a31a-9cb46a459d3c for instance with vm_state building and task_state spawning. 
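The lock messages that recur throughout these entries ("Acquiring lock X by Y", "acquired ... :: waited Ns", '"released" ... :: held Ns' at lockutils.py:402/407/421, and the shorter Acquiring/Acquired/Releasing sequence at lockutils.py:310/313/331 around the refresh_cache locks) are emitted by oslo.concurrency's named locks. A minimal sketch of the two usage forms that produce them, with made-up lock names rather than Nova's own, follows:

# Illustrative sketch, not Nova source: the lock DEBUG lines in the log come
# from oslo.concurrency named locks. Lock names below are invented for the
# example.
from oslo_concurrency import lockutils

# Decorator form -- produces the "Acquiring lock ... by ..." /
# "acquired ... :: waited" / '"released" ... :: held' triple logged by the
# decorator's inner wrapper.
@lockutils.synchronized("example-instance-uuid-events")
def pop_instance_event():
    # Runs only while the named in-process lock is held.
    pass

# Context-manager form -- produces the shorter Acquiring/Acquired/Releasing
# sequence seen around the refresh_cache-<uuid> locks.
def refresh_cache(instance_uuid):
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        pass  # rebuild the instance network info cache under the lock

pop_instance_event()
refresh_cache("example-instance-uuid")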
[ 1395.200868] env[62510]: DEBUG nova.compute.manager [req-5ee2906c-2497-496e-9de9-0a7031907bc3 req-ad7dbbdc-2029-454d-9c9a-51ca01462e77 service nova] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Received event network-changed-d87267d7-ec03-4d4a-a31a-9cb46a459d3c {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1395.200868] env[62510]: DEBUG nova.compute.manager [req-5ee2906c-2497-496e-9de9-0a7031907bc3 req-ad7dbbdc-2029-454d-9c9a-51ca01462e77 service nova] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Refreshing instance network info cache due to event network-changed-d87267d7-ec03-4d4a-a31a-9cb46a459d3c. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1395.200868] env[62510]: DEBUG oslo_concurrency.lockutils [req-5ee2906c-2497-496e-9de9-0a7031907bc3 req-ad7dbbdc-2029-454d-9c9a-51ca01462e77 service nova] Acquiring lock "refresh_cache-5588650b-c450-489a-a456-3b580a5b9114" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1395.200868] env[62510]: DEBUG oslo_concurrency.lockutils [req-5ee2906c-2497-496e-9de9-0a7031907bc3 req-ad7dbbdc-2029-454d-9c9a-51ca01462e77 service nova] Acquired lock "refresh_cache-5588650b-c450-489a-a456-3b580a5b9114" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1395.200868] env[62510]: DEBUG nova.network.neutron [req-5ee2906c-2497-496e-9de9-0a7031907bc3 req-ad7dbbdc-2029-454d-9c9a-51ca01462e77 service nova] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Refreshing network info cache for port d87267d7-ec03-4d4a-a31a-9cb46a459d3c {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1395.227655] env[62510]: DEBUG nova.network.neutron [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Updating instance_info_cache with network_info: [{"id": "7489ebb6-ec5f-4097-9a62-81a2d3dedd52", "address": "fa:16:3e:81:65:65", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.149", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7489ebb6-ec", "ovs_interfaceid": "7489ebb6-ec5f-4097-9a62-81a2d3dedd52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1395.303579] env[62510]: DEBUG nova.network.neutron [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Successfully 
updated port: 55c9d2e9-7fb5-4e9d-8071-fe23efeedb7c {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1395.531036] env[62510]: DEBUG nova.scheduler.client.report [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 167, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1395.537398] env[62510]: DEBUG nova.network.neutron [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Successfully created port: e83f36f6-e38c-49b4-b419-59f9030e6005 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1395.546911] env[62510]: DEBUG oslo_vmware.api [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52204a93-c7b7-2c2f-5458-0d3867580dc8, 'name': SearchDatastore_Task, 'duration_secs': 0.00931} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.548285] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1395.548554] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 0604d37b-38c5-4510-894e-b26fd44e17c5/0604d37b-38c5-4510-894e-b26fd44e17c5.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1395.548835] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1395.549027] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 
1395.549305] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d2f9cb84-474e-4d93-aa91-23f6e4d2dcc6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.554055] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0a105bde-4c24-4a42-af5e-74da4d660b7e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.561620] env[62510]: DEBUG oslo_vmware.api [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Waiting for the task: (returnval){ [ 1395.561620] env[62510]: value = "task-1768173" [ 1395.561620] env[62510]: _type = "Task" [ 1395.561620] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.562179] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1395.562345] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1395.567406] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92b80368-7f30-4ebe-843a-c353fb232803 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.575313] env[62510]: DEBUG oslo_vmware.api [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': task-1768173, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.576605] env[62510]: DEBUG oslo_vmware.api [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1395.576605] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52370b97-0e22-2640-c726-e1e25d6fd20c" [ 1395.576605] env[62510]: _type = "Task" [ 1395.576605] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.585262] env[62510]: DEBUG oslo_vmware.api [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52370b97-0e22-2640-c726-e1e25d6fd20c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.704020] env[62510]: DEBUG nova.compute.manager [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1395.743556] env[62510]: DEBUG nova.virt.hardware [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1395.743810] env[62510]: DEBUG nova.virt.hardware [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1395.743966] env[62510]: DEBUG nova.virt.hardware [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1395.744169] env[62510]: DEBUG nova.virt.hardware [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1395.744311] env[62510]: DEBUG nova.virt.hardware [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1395.744625] env[62510]: DEBUG nova.virt.hardware [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1395.744736] env[62510]: DEBUG nova.virt.hardware [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1395.744886] 
env[62510]: DEBUG nova.virt.hardware [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1395.745072] env[62510]: DEBUG nova.virt.hardware [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1395.745239] env[62510]: DEBUG nova.virt.hardware [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1395.745415] env[62510]: DEBUG nova.virt.hardware [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1395.745903] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Releasing lock "refresh_cache-12768001-6ed0-47be-8f20-c59ee82b842a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1395.746285] env[62510]: DEBUG nova.compute.manager [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Instance network_info: |[{"id": "7489ebb6-ec5f-4097-9a62-81a2d3dedd52", "address": "fa:16:3e:81:65:65", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.149", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7489ebb6-ec", "ovs_interfaceid": "7489ebb6-ec5f-4097-9a62-81a2d3dedd52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1395.747025] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a0cce25-3da0-4b16-a060-4b8a60b84e8d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.749946] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5af6f516-e584-4b94-8129-abe0b583e646 
tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:81:65:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '28d04eee-6dbb-491a-a999-b659c799679d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7489ebb6-ec5f-4097-9a62-81a2d3dedd52', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1395.757661] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Creating folder: Project (c74083aa7b4a4db5b9b6d6248beb3ff3). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1395.761548] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-629d5d9a-ba5f-4400-b0a4-9f974ac9a6af {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.768525] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f86d9ff9-d399-4dc9-9b32-dc6cf372dab4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.775549] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Created folder: Project (c74083aa7b4a4db5b9b6d6248beb3ff3) in parent group-v367197. [ 1395.775549] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Creating folder: Instances. Parent ref: group-v367204. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1395.775549] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4bf918a9-1457-4fcd-941c-213c1d72215a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.796922] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Created folder: Instances in parent group-v367204. [ 1395.797057] env[62510]: DEBUG oslo.service.loopingcall [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1395.797266] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1395.797480] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bae42a25-d2d1-4e78-988b-78578bbb9441 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.813273] env[62510]: DEBUG oslo_concurrency.lockutils [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Acquiring lock "refresh_cache-3266d254-4a75-4fd3-b4e7-ebeb86467cbe" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1395.813469] env[62510]: DEBUG oslo_concurrency.lockutils [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Acquired lock "refresh_cache-3266d254-4a75-4fd3-b4e7-ebeb86467cbe" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1395.813641] env[62510]: DEBUG nova.network.neutron [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1395.823173] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1395.823173] env[62510]: value = "task-1768176" [ 1395.823173] env[62510]: _type = "Task" [ 1395.823173] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.833089] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768176, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.004909] env[62510]: DEBUG nova.network.neutron [req-5ee2906c-2497-496e-9de9-0a7031907bc3 req-ad7dbbdc-2029-454d-9c9a-51ca01462e77 service nova] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Updated VIF entry in instance network info cache for port d87267d7-ec03-4d4a-a31a-9cb46a459d3c. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1396.005455] env[62510]: DEBUG nova.network.neutron [req-5ee2906c-2497-496e-9de9-0a7031907bc3 req-ad7dbbdc-2029-454d-9c9a-51ca01462e77 service nova] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Updating instance_info_cache with network_info: [{"id": "d87267d7-ec03-4d4a-a31a-9cb46a459d3c", "address": "fa:16:3e:eb:1f:d2", "network": {"id": "bf59f5d9-5154-4120-9edd-03529b552382", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2003015829-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e144c0bd2d124193a65ad53de8c43039", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd87267d7-ec", "ovs_interfaceid": "d87267d7-ec03-4d4a-a31a-9cb46a459d3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1396.037405] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.371s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1396.037405] env[62510]: DEBUG nova.compute.manager [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1396.039555] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.763s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1396.042373] env[62510]: INFO nova.compute.claims [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1396.072520] env[62510]: DEBUG oslo_vmware.api [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': task-1768173, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.089582] env[62510]: DEBUG oslo_vmware.api [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52370b97-0e22-2640-c726-e1e25d6fd20c, 'name': SearchDatastore_Task, 'duration_secs': 0.023976} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.091763] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4455e9b-cc81-4d02-87ee-6a2b7481063f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.102150] env[62510]: DEBUG oslo_vmware.api [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1396.102150] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52126c97-b673-805d-b858-485be670b4da" [ 1396.102150] env[62510]: _type = "Task" [ 1396.102150] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.113726] env[62510]: DEBUG oslo_vmware.api [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52126c97-b673-805d-b858-485be670b4da, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.200192] env[62510]: DEBUG nova.network.neutron [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Successfully created port: 55cae8a5-e495-4d62-a2c0-b2effaf346ec {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1396.207609] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1396.207971] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62510) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 1396.333806] env[62510]: DEBUG nova.compute.manager [req-a407dc4d-a4fb-4bd8-af16-526a1616d674 req-9cd0345e-8a6c-46d2-9af6-b7ea3a28255c service nova] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Received event network-vif-plugged-55c9d2e9-7fb5-4e9d-8071-fe23efeedb7c {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1396.334033] env[62510]: DEBUG oslo_concurrency.lockutils [req-a407dc4d-a4fb-4bd8-af16-526a1616d674 req-9cd0345e-8a6c-46d2-9af6-b7ea3a28255c service nova] Acquiring lock "3266d254-4a75-4fd3-b4e7-ebeb86467cbe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1396.334546] env[62510]: DEBUG oslo_concurrency.lockutils [req-a407dc4d-a4fb-4bd8-af16-526a1616d674 req-9cd0345e-8a6c-46d2-9af6-b7ea3a28255c service nova] Lock "3266d254-4a75-4fd3-b4e7-ebeb86467cbe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1396.334546] env[62510]: DEBUG oslo_concurrency.lockutils [req-a407dc4d-a4fb-4bd8-af16-526a1616d674 req-9cd0345e-8a6c-46d2-9af6-b7ea3a28255c service nova] Lock "3266d254-4a75-4fd3-b4e7-ebeb86467cbe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1396.334650] env[62510]: DEBUG nova.compute.manager [req-a407dc4d-a4fb-4bd8-af16-526a1616d674 req-9cd0345e-8a6c-46d2-9af6-b7ea3a28255c service nova] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] No waiting events found dispatching network-vif-plugged-55c9d2e9-7fb5-4e9d-8071-fe23efeedb7c {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1396.334762] env[62510]: WARNING nova.compute.manager [req-a407dc4d-a4fb-4bd8-af16-526a1616d674 req-9cd0345e-8a6c-46d2-9af6-b7ea3a28255c service nova] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Received unexpected event network-vif-plugged-55c9d2e9-7fb5-4e9d-8071-fe23efeedb7c for instance with vm_state building and task_state spawning. [ 1396.342213] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768176, 'name': CreateVM_Task, 'duration_secs': 0.451236} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.342213] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1396.344316] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1396.344562] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1396.345746] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1396.345746] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26d86e05-d40e-462a-a12e-2e9ab7cd63da {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.352101] env[62510]: DEBUG oslo_vmware.api [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Waiting for the task: (returnval){ [ 1396.352101] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52db23f1-2efa-3391-4cfd-ba18269f50ba" [ 1396.352101] env[62510]: _type = "Task" [ 1396.352101] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.361812] env[62510]: DEBUG oslo_vmware.api [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52db23f1-2efa-3391-4cfd-ba18269f50ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.367550] env[62510]: DEBUG nova.network.neutron [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Instance cache missing network info. 
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1396.509031] env[62510]: DEBUG oslo_concurrency.lockutils [req-5ee2906c-2497-496e-9de9-0a7031907bc3 req-ad7dbbdc-2029-454d-9c9a-51ca01462e77 service nova] Releasing lock "refresh_cache-5588650b-c450-489a-a456-3b580a5b9114" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1396.549519] env[62510]: DEBUG nova.compute.utils [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1396.550913] env[62510]: DEBUG nova.compute.manager [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Not allocating networking since 'none' was specified. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1396.577323] env[62510]: DEBUG oslo_vmware.api [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': task-1768173, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.573506} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.577921] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 0604d37b-38c5-4510-894e-b26fd44e17c5/0604d37b-38c5-4510-894e-b26fd44e17c5.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1396.578481] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1396.578584] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-499901aa-7b34-44b4-837a-f51d1c5d5f1c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.586825] env[62510]: DEBUG oslo_vmware.api [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Waiting for the task: (returnval){ [ 1396.586825] env[62510]: value = "task-1768177" [ 1396.586825] env[62510]: _type = "Task" [ 1396.586825] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.597378] env[62510]: DEBUG nova.network.neutron [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Updating instance_info_cache with network_info: [{"id": "55c9d2e9-7fb5-4e9d-8071-fe23efeedb7c", "address": "fa:16:3e:4f:f8:90", "network": {"id": "30c7a896-1d3b-4c07-900b-66caaf19e751", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-204185731-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97e5d626c8c04377b1653057d6fb63e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55c9d2e9-7f", "ovs_interfaceid": "55c9d2e9-7fb5-4e9d-8071-fe23efeedb7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1396.604325] env[62510]: DEBUG oslo_vmware.api [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': task-1768177, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.614027] env[62510]: DEBUG oslo_vmware.api [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52126c97-b673-805d-b858-485be670b4da, 'name': SearchDatastore_Task, 'duration_secs': 0.019465} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.614027] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1396.614180] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 5588650b-c450-489a-a456-3b580a5b9114/5588650b-c450-489a-a456-3b580a5b9114.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1396.614359] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-25a65a7b-02e6-4cfb-b34e-f51db8491ac7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.621125] env[62510]: DEBUG oslo_vmware.api [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1396.621125] env[62510]: value = "task-1768178" [ 1396.621125] env[62510]: _type = "Task" [ 1396.621125] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.629729] env[62510]: DEBUG oslo_vmware.api [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768178, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.864780] env[62510]: DEBUG oslo_vmware.api [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52db23f1-2efa-3391-4cfd-ba18269f50ba, 'name': SearchDatastore_Task, 'duration_secs': 0.008926} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.867033] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1396.867033] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1396.867033] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1396.867033] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1396.867214] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1396.867214] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-40ff5165-a174-4feb-b4b7-f488a9a4aa31 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.883905] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1396.883905] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1396.884250] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e467607e-a966-4a77-88c2-301563878ec4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.892095] env[62510]: DEBUG oslo_vmware.api [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Waiting for the task: (returnval){ [ 1396.892095] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]529cc2e2-7f52-a611-6964-f28b10829f1e" [ 1396.892095] env[62510]: _type = "Task" [ 1396.892095] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.901824] env[62510]: DEBUG oslo_vmware.api [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]529cc2e2-7f52-a611-6964-f28b10829f1e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.052861] env[62510]: DEBUG nova.compute.manager [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1397.105534] env[62510]: DEBUG oslo_concurrency.lockutils [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Releasing lock "refresh_cache-3266d254-4a75-4fd3-b4e7-ebeb86467cbe" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1397.105774] env[62510]: DEBUG nova.compute.manager [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Instance network_info: |[{"id": "55c9d2e9-7fb5-4e9d-8071-fe23efeedb7c", "address": "fa:16:3e:4f:f8:90", "network": {"id": "30c7a896-1d3b-4c07-900b-66caaf19e751", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-204185731-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97e5d626c8c04377b1653057d6fb63e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55c9d2e9-7f", "ovs_interfaceid": "55c9d2e9-7fb5-4e9d-8071-fe23efeedb7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1397.106350] env[62510]: DEBUG oslo_vmware.api [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': task-1768177, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064194} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.110677] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4f:f8:90', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '517421c3-bea0-419c-ab0b-987815e5d160', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '55c9d2e9-7fb5-4e9d-8071-fe23efeedb7c', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1397.119399] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Creating folder: Project (97e5d626c8c04377b1653057d6fb63e8). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1397.119734] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1397.121069] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-df64d765-53af-49b0-b5c5-05c9d1a67a63 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.125041] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf5ecbbf-9e32-4749-b8a7-265888eddea5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.165228] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Reconfiguring VM instance instance-00000001 to attach disk [datastore1] 0604d37b-38c5-4510-894e-b26fd44e17c5/0604d37b-38c5-4510-894e-b26fd44e17c5.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1397.170521] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-11ef0bdc-d55d-4ddd-9342-c4f6013f3e30 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.191358] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Created folder: Project (97e5d626c8c04377b1653057d6fb63e8) in parent group-v367197. 
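The CopyVirtualDisk_Task, ReconfigVM_Task and CreateVM_Task traffic above all follows the same shape: the vSphere task is invoked once, then polled at a fixed interval until vCenter reports success, which is what produces the repeated "progress is N%." and "completed successfully." entries. Below is a minimal sketch of that poll-until-done pattern, assuming oslo.service's FixedIntervalLoopingCall (the helper behind the oslo.service.loopingcall entries in this log); poll_progress() is a hypothetical stand-in for the real PropertyCollector-based task query, not the actual oslo.vmware implementation.

from oslo_service import loopingcall


# Illustrative sketch only: simplified poll loop in the spirit of the
# wait_for_task/_poll_task lines in this log. poll_progress is a
# hypothetical callable returning (state, progress, result) for a task.
def wait_for_task(poll_progress, interval=0.5):
    def _poll():
        state, progress, result = poll_progress()
        # The real code emits the "Task: {...} progress is N%." DEBUG line here.
        if state == "success":
            # Stops the looping call; .wait() below returns `result`.
            raise loopingcall.LoopingCallDone(result)
        if state == "error":
            raise RuntimeError("task failed")  # re-raised from .wait()

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    return timer.start(interval=interval).wait()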
[ 1397.191358] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Creating folder: Instances. Parent ref: group-v367207. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1397.191358] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-764a344b-e2b6-482c-9f97-24ec3bfdb123 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.195464] env[62510]: DEBUG oslo_vmware.api [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768178, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.200737] env[62510]: DEBUG oslo_vmware.api [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Waiting for the task: (returnval){ [ 1397.200737] env[62510]: value = "task-1768180" [ 1397.200737] env[62510]: _type = "Task" [ 1397.200737] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.205256] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Created folder: Instances in parent group-v367207. [ 1397.206044] env[62510]: DEBUG oslo.service.loopingcall [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1397.206044] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1397.209806] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1397.210822] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-78636e23-10b1-4044-b1c1-4e46b09fa8dc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.234096] env[62510]: DEBUG oslo_vmware.api [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': task-1768180, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.239259] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1397.245767] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1397.245767] env[62510]: value = "task-1768182" [ 1397.245767] env[62510]: _type = "Task" [ 1397.245767] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.257699] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768182, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.275050] env[62510]: DEBUG oslo_concurrency.lockutils [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Acquiring lock "8bbafd7f-cdd1-4246-a509-2f97a6f78497" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1397.275050] env[62510]: DEBUG oslo_concurrency.lockutils [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Lock "8bbafd7f-cdd1-4246-a509-2f97a6f78497" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1397.422320] env[62510]: DEBUG oslo_vmware.api [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]529cc2e2-7f52-a611-6964-f28b10829f1e, 'name': SearchDatastore_Task, 'duration_secs': 0.047866} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.425682] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea053cca-ab14-4962-aecc-cee9d3af8ed4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.438200] env[62510]: DEBUG oslo_vmware.api [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Waiting for the task: (returnval){ [ 1397.438200] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52237dab-5a23-94f0-371c-9110b68c4e44" [ 1397.438200] env[62510]: _type = "Task" [ 1397.438200] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.453376] env[62510]: DEBUG oslo_vmware.api [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52237dab-5a23-94f0-371c-9110b68c4e44, 'name': SearchDatastore_Task, 'duration_secs': 0.010142} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.454295] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1397.454295] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 12768001-6ed0-47be-8f20-c59ee82b842a/12768001-6ed0-47be-8f20-c59ee82b842a.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1397.454881] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-de61025e-5e6a-431a-8fa0-d408652c038b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.464594] env[62510]: DEBUG oslo_vmware.api [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Waiting for the task: (returnval){ [ 1397.464594] env[62510]: value = "task-1768183" [ 1397.464594] env[62510]: _type = "Task" [ 1397.464594] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.485954] env[62510]: DEBUG oslo_vmware.api [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768183, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.543242] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a92e11fa-d222-4cf3-a216-9bf9681d2404 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.553737] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d550db1-8ada-47e9-b125-644a060b8e0a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.598204] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e655afc-baa6-431c-bd55-c8d6999cef46 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.606658] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4c30ef1-e4d4-4a67-a86a-97fa95fce9dd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.623071] env[62510]: DEBUG nova.compute.provider_tree [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1397.639433] env[62510]: DEBUG oslo_vmware.api [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768178, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.594633} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.639734] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 5588650b-c450-489a-a456-3b580a5b9114/5588650b-c450-489a-a456-3b580a5b9114.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1397.639875] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1397.640324] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8a44e725-c07d-469c-bef3-5608a50a4bb8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.648159] env[62510]: DEBUG oslo_vmware.api [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1397.648159] env[62510]: value = "task-1768184" [ 1397.648159] env[62510]: _type = "Task" [ 1397.648159] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.660444] env[62510]: DEBUG oslo_vmware.api [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768184, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.717083] env[62510]: DEBUG oslo_vmware.api [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': task-1768180, 'name': ReconfigVM_Task, 'duration_secs': 0.293643} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.717307] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Reconfigured VM instance instance-00000001 to attach disk [datastore1] 0604d37b-38c5-4510-894e-b26fd44e17c5/0604d37b-38c5-4510-894e-b26fd44e17c5.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1397.718031] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4f2656b2-93c7-44ef-8a89-a302751c4679 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.724639] env[62510]: DEBUG oslo_vmware.api [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Waiting for the task: (returnval){ [ 1397.724639] env[62510]: value = "task-1768185" [ 1397.724639] env[62510]: _type = "Task" [ 1397.724639] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.732833] env[62510]: DEBUG oslo_vmware.api [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': task-1768185, 'name': Rename_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.758661] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768182, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.975495] env[62510]: DEBUG oslo_vmware.api [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768183, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.100603] env[62510]: DEBUG nova.compute.manager [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1398.127266] env[62510]: DEBUG nova.scheduler.client.report [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 167, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1398.138030] env[62510]: DEBUG nova.virt.hardware [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1398.138030] env[62510]: DEBUG nova.virt.hardware [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1398.138030] env[62510]: DEBUG nova.virt.hardware [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1398.138393] env[62510]: DEBUG nova.virt.hardware [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1398.138393] env[62510]: DEBUG nova.virt.hardware [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1398.138393] env[62510]: DEBUG nova.virt.hardware [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 
1398.138393] env[62510]: DEBUG nova.virt.hardware [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1398.138393] env[62510]: DEBUG nova.virt.hardware [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1398.138539] env[62510]: DEBUG nova.virt.hardware [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1398.138539] env[62510]: DEBUG nova.virt.hardware [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1398.138539] env[62510]: DEBUG nova.virt.hardware [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1398.138539] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cce6b6b2-ae67-4732-9e43-17c2db7df6b0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.147254] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c416779-bba3-4f63-97fd-fec58adfe1c0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.160521] env[62510]: DEBUG oslo_vmware.api [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768184, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068916} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.170496] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1398.171901] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Instance VIF info [] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1398.178915] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Creating folder: Project (31c1ec843c8f470a8fe9ffcc1bdf64a3). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1398.182283] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3af6ba28-cde1-4848-a230-fa21c418a0b1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.183818] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b1691a4d-8262-4475-975c-cdf25abe72f7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.209314] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Reconfiguring VM instance instance-00000002 to attach disk [datastore1] 5588650b-c450-489a-a456-3b580a5b9114/5588650b-c450-489a-a456-3b580a5b9114.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1398.211393] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8fc7bf96-b917-454b-8ecf-f8364e9292be {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.227522] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Created folder: Project (31c1ec843c8f470a8fe9ffcc1bdf64a3) in parent group-v367197. [ 1398.227736] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Creating folder: Instances. Parent ref: group-v367210. 
{{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1398.228367] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a890ee41-891d-4f6f-b50f-9b755378ecf8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.240474] env[62510]: DEBUG oslo_vmware.api [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': task-1768185, 'name': Rename_Task, 'duration_secs': 0.154936} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.242441] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1398.242621] env[62510]: DEBUG oslo_vmware.api [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1398.242621] env[62510]: value = "task-1768187" [ 1398.242621] env[62510]: _type = "Task" [ 1398.242621] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.242817] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-41f56b8c-ec87-43ff-a090-9eb585199d2a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.251123] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Created folder: Instances in parent group-v367210. [ 1398.251428] env[62510]: DEBUG oslo.service.loopingcall [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1398.252154] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1398.255970] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-edfd6882-93c9-45d8-9743-2810646684e6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.276952] env[62510]: DEBUG oslo_vmware.api [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Waiting for the task: (returnval){ [ 1398.276952] env[62510]: value = "task-1768189" [ 1398.276952] env[62510]: _type = "Task" [ 1398.276952] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.277189] env[62510]: DEBUG oslo_vmware.api [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768187, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.287613] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768182, 'name': CreateVM_Task, 'duration_secs': 0.732125} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.288670] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1398.289656] env[62510]: DEBUG oslo_concurrency.lockutils [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1398.290153] env[62510]: DEBUG oslo_concurrency.lockutils [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1398.290400] env[62510]: DEBUG oslo_concurrency.lockutils [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1398.294818] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3fa533c3-4b4f-4482-9c15-9db7d489d63a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.296835] env[62510]: DEBUG oslo_vmware.api [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': task-1768189, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.298233] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1398.298233] env[62510]: value = "task-1768190" [ 1398.298233] env[62510]: _type = "Task" [ 1398.298233] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.303208] env[62510]: DEBUG oslo_vmware.api [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Waiting for the task: (returnval){ [ 1398.303208] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52627c15-c870-8092-6dee-20f46270e907" [ 1398.303208] env[62510]: _type = "Task" [ 1398.303208] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.309916] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768190, 'name': CreateVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.314609] env[62510]: DEBUG oslo_vmware.api [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52627c15-c870-8092-6dee-20f46270e907, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.437807] env[62510]: DEBUG nova.network.neutron [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Successfully updated port: ca5eb991-9338-4e3a-8dcc-322896c420df {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1398.447727] env[62510]: DEBUG nova.compute.manager [req-eaa8cf9f-7d2f-42eb-a52d-106f7ddc0f63 req-c69cc751-0ea7-448d-9528-958056ef233a service nova] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Received event network-changed-7489ebb6-ec5f-4097-9a62-81a2d3dedd52 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1398.447727] env[62510]: DEBUG nova.compute.manager [req-eaa8cf9f-7d2f-42eb-a52d-106f7ddc0f63 req-c69cc751-0ea7-448d-9528-958056ef233a service nova] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Refreshing instance network info cache due to event network-changed-7489ebb6-ec5f-4097-9a62-81a2d3dedd52. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1398.447727] env[62510]: DEBUG oslo_concurrency.lockutils [req-eaa8cf9f-7d2f-42eb-a52d-106f7ddc0f63 req-c69cc751-0ea7-448d-9528-958056ef233a service nova] Acquiring lock "refresh_cache-12768001-6ed0-47be-8f20-c59ee82b842a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1398.447727] env[62510]: DEBUG oslo_concurrency.lockutils [req-eaa8cf9f-7d2f-42eb-a52d-106f7ddc0f63 req-c69cc751-0ea7-448d-9528-958056ef233a service nova] Acquired lock "refresh_cache-12768001-6ed0-47be-8f20-c59ee82b842a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1398.447727] env[62510]: DEBUG nova.network.neutron [req-eaa8cf9f-7d2f-42eb-a52d-106f7ddc0f63 req-c69cc751-0ea7-448d-9528-958056ef233a service nova] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Refreshing network info cache for port 7489ebb6-ec5f-4097-9a62-81a2d3dedd52 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1398.477745] env[62510]: DEBUG oslo_vmware.api [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768183, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.803003} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.477968] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 12768001-6ed0-47be-8f20-c59ee82b842a/12768001-6ed0-47be-8f20-c59ee82b842a.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1398.478114] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1398.478367] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b8e837df-92cb-4fdd-b10f-c489956ceb69 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.488796] env[62510]: DEBUG oslo_vmware.api [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Waiting for the task: (returnval){ [ 1398.488796] env[62510]: value = "task-1768191" [ 1398.488796] env[62510]: _type = "Task" [ 1398.488796] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.497379] env[62510]: DEBUG oslo_vmware.api [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768191, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.643693] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.604s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1398.644044] env[62510]: DEBUG nova.compute.manager [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1398.646923] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.279s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1398.650275] env[62510]: INFO nova.compute.claims [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1398.758449] env[62510]: DEBUG oslo_vmware.api [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768187, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.793020] env[62510]: DEBUG oslo_vmware.api [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': task-1768189, 'name': PowerOnVM_Task, 'duration_secs': 0.485726} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.793020] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1398.793020] env[62510]: INFO nova.compute.manager [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Took 12.38 seconds to spawn the instance on the hypervisor. 
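The lock lines in the surrounding entries (the per-instance build locks, the "compute_resources" lock, and the per-image locks on "[datastore1] devstack-image-cache_base/...") come from oslo.concurrency's named locks, which serialize work on a given resource while letting unrelated requests proceed in parallel. A minimal sketch of that usage follows, assuming lockutils from oslo.concurrency; the function names and the copy_fn argument are illustrative placeholders, not nova's actual code.

from oslo_concurrency import lockutils


# Illustrative sketch only: the named-lock pattern behind the
# "Acquiring/Acquired/Releasing lock" entries in this log.
def copy_cached_image(cache_vmdk_path, dest_vmdk_path, copy_fn):
    # One caller at a time per cached vmdk; builds using different images
    # are not serialized against each other.
    with lockutils.lock(cache_vmdk_path):
        copy_fn(cache_vmdk_path, dest_vmdk_path)


# Decorator form, as used for coarser locks such as "compute_resources".
@lockutils.synchronized("compute_resources")
def update_compute_resources():
    pass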
[ 1398.793020] env[62510]: DEBUG nova.compute.manager [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1398.793020] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41fa4ba4-504e-45cf-8048-1a1a79aef34b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.816315] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768190, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.819013] env[62510]: DEBUG oslo_vmware.api [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52627c15-c870-8092-6dee-20f46270e907, 'name': SearchDatastore_Task, 'duration_secs': 0.02442} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.819013] env[62510]: DEBUG oslo_concurrency.lockutils [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1398.819343] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1398.819432] env[62510]: DEBUG oslo_concurrency.lockutils [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1398.819598] env[62510]: DEBUG oslo_concurrency.lockutils [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1398.819826] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1398.820118] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-52bba07d-0fc3-446f-957d-07fd7e41d624 {{(pid=62510) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.881556] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Acquiring lock "c58184e7-bf4f-406b-a778-9b8f60740fe6" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1398.881793] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Lock "c58184e7-bf4f-406b-a778-9b8f60740fe6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1398.912047] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Acquiring lock "26b283b0-98b4-4a15-abe0-fbf97e1f49eb" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1398.912288] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Lock "26b283b0-98b4-4a15-abe0-fbf97e1f49eb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1398.944413] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Acquiring lock "9a1a0428-8ccd-4614-8853-ef3eeec23d55" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1398.945096] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Lock "9a1a0428-8ccd-4614-8853-ef3eeec23d55" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1398.998057] env[62510]: DEBUG oslo_vmware.api [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768191, 'name': ExtendVirtualDisk_Task} progress is 50%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.153703] env[62510]: DEBUG nova.compute.utils [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1399.165171] env[62510]: DEBUG nova.compute.manager [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1399.167921] env[62510]: DEBUG nova.network.neutron [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1399.264518] env[62510]: DEBUG oslo_vmware.api [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768187, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.323402] env[62510]: DEBUG nova.policy [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4d7b920a8fff4109addbd35ee8737062', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2f756c440e2b4614ac89e1d5695cab2b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1399.325361] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768190, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.326423] env[62510]: INFO nova.compute.manager [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Took 17.22 seconds to build instance. [ 1399.362828] env[62510]: DEBUG nova.network.neutron [req-eaa8cf9f-7d2f-42eb-a52d-106f7ddc0f63 req-c69cc751-0ea7-448d-9528-958056ef233a service nova] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Updated VIF entry in instance network info cache for port 7489ebb6-ec5f-4097-9a62-81a2d3dedd52. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1399.362828] env[62510]: DEBUG nova.network.neutron [req-eaa8cf9f-7d2f-42eb-a52d-106f7ddc0f63 req-c69cc751-0ea7-448d-9528-958056ef233a service nova] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Updating instance_info_cache with network_info: [{"id": "7489ebb6-ec5f-4097-9a62-81a2d3dedd52", "address": "fa:16:3e:81:65:65", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.149", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7489ebb6-ec", "ovs_interfaceid": "7489ebb6-ec5f-4097-9a62-81a2d3dedd52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1399.499086] env[62510]: DEBUG oslo_vmware.api [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768191, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.665767] env[62510]: DEBUG nova.compute.manager [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1399.761997] env[62510]: DEBUG oslo_vmware.api [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768187, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.813453] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768190, 'name': CreateVM_Task} progress is 99%. 
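The instance_info_cache update above carries the full network_info structure for port 7489ebb6-ec. A small sketch of pulling the useful fields (port id, MAC, device name, fixed IPs) out of such an entry; the literal below is a trimmed copy of the logged blob.

    network_info = [{
        "id": "7489ebb6-ec5f-4097-9a62-81a2d3dedd52",
        "address": "fa:16:3e:81:65:65",
        "network": {
            "id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4",
            "bridge": "br-int",
            "label": "shared",
            "subnets": [{
                "cidr": "192.168.233.0/24",
                "gateway": {"address": "192.168.233.1", "version": 4},
                "ips": [{"address": "192.168.233.149", "type": "fixed",
                         "version": 4}],
            }],
            "meta": {"mtu": 8950},
        },
        "type": "ovs",
        "devname": "tap7489ebb6-ec",
        "active": True,
    }]

    for vif in network_info:
        fixed = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"] if ip["type"] == "fixed"]
        print(vif["id"], vif["address"], vif["devname"], fixed)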
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.828749] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dfa593d6-b4a4-48f1-bd7f-d282a782d8d0 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Lock "0604d37b-38c5-4510-894e-b26fd44e17c5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.734s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1399.865312] env[62510]: DEBUG oslo_concurrency.lockutils [req-eaa8cf9f-7d2f-42eb-a52d-106f7ddc0f63 req-c69cc751-0ea7-448d-9528-958056ef233a service nova] Releasing lock "refresh_cache-12768001-6ed0-47be-8f20-c59ee82b842a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1399.945349] env[62510]: DEBUG nova.network.neutron [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Successfully created port: c9624a79-da6f-44aa-87fe-e5872f2e1d7d {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1400.011838] env[62510]: DEBUG oslo_vmware.api [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768191, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.016887] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fdad82c-0df4-4e99-b38c-2b08c70a87f5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.027447] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1c3d727-82e0-4ace-97bb-eb64d7186ea6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.064103] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe536a61-ab2c-47f6-b972-9ba1250a36a6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.072466] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d8a4af8-34f8-4a95-a721-4afdd651fdcc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.090616] env[62510]: DEBUG nova.compute.provider_tree [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1400.194108] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1400.194108] env[62510]: DEBUG 
nova.virt.vmwareapi.vmops [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1400.194684] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa8d6c49-6cec-4d1a-adda-3c2317b36173 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.200776] env[62510]: DEBUG oslo_vmware.api [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Waiting for the task: (returnval){ [ 1400.200776] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5235bff0-7051-d7ef-6a4f-2379375f1bf3" [ 1400.200776] env[62510]: _type = "Task" [ 1400.200776] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.212023] env[62510]: DEBUG oslo_vmware.api [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5235bff0-7051-d7ef-6a4f-2379375f1bf3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.259923] env[62510]: DEBUG oslo_vmware.api [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768187, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.316580] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768190, 'name': CreateVM_Task, 'duration_secs': 1.942086} completed successfully. 
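The repeated "Waiting for the task ... to complete" and "Task: {...} progress is N%" records come from polling vCenter task objects. A sketch of that polling loop written with pyVmomi rather than the suds-based oslo.vmware client the log uses (pyVmomi also ships pyVim.task.WaitForTask for the same job):

    import time

    from pyVmomi import vim


    def wait_for_task(task, poll_interval=0.5):
        """Poll a vCenter task until it leaves the queued/running states."""
        while task.info.state in (vim.TaskInfo.State.queued,
                                  vim.TaskInfo.State.running):
            print("Task %s progress is %s%%"
                  % (task.info.key, task.info.progress or 0))
            time.sleep(poll_interval)
        if task.info.state == vim.TaskInfo.State.error:
            raise task.info.error   # vSphere MethodFaults subclass Exception
        return task.info.result

Each Task invoked later in this section (ReconfigVM_Task, CopyVirtualDisk_Task, PowerOnVM_Task) could be handed to a helper like this; the duration_secs values in the log are simply the wall-clock time until the task reports success.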
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.316766] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1400.321308] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1400.321560] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1400.321940] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1400.322275] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a82b9e93-51ac-4573-92be-2b03766f1df0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.330487] env[62510]: DEBUG oslo_vmware.api [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Waiting for the task: (returnval){ [ 1400.330487] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52e7f313-d9d1-b3d7-316a-e8ffa1e55070" [ 1400.330487] env[62510]: _type = "Task" [ 1400.330487] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.334013] env[62510]: DEBUG nova.compute.manager [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1400.352819] env[62510]: DEBUG nova.compute.manager [req-30429799-31b6-4ad0-af3f-9a7c2f4ba469 req-189361c7-fb88-4dc3-80d0-f82b9d3552f6 service nova] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Received event network-changed-55c9d2e9-7fb5-4e9d-8071-fe23efeedb7c {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1400.353040] env[62510]: DEBUG nova.compute.manager [req-30429799-31b6-4ad0-af3f-9a7c2f4ba469 req-189361c7-fb88-4dc3-80d0-f82b9d3552f6 service nova] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Refreshing instance network info cache due to event network-changed-55c9d2e9-7fb5-4e9d-8071-fe23efeedb7c. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1400.353284] env[62510]: DEBUG oslo_concurrency.lockutils [req-30429799-31b6-4ad0-af3f-9a7c2f4ba469 req-189361c7-fb88-4dc3-80d0-f82b9d3552f6 service nova] Acquiring lock "refresh_cache-3266d254-4a75-4fd3-b4e7-ebeb86467cbe" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1400.353442] env[62510]: DEBUG oslo_concurrency.lockutils [req-30429799-31b6-4ad0-af3f-9a7c2f4ba469 req-189361c7-fb88-4dc3-80d0-f82b9d3552f6 service nova] Acquired lock "refresh_cache-3266d254-4a75-4fd3-b4e7-ebeb86467cbe" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1400.353643] env[62510]: DEBUG nova.network.neutron [req-30429799-31b6-4ad0-af3f-9a7c2f4ba469 req-189361c7-fb88-4dc3-80d0-f82b9d3552f6 service nova] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Refreshing network info cache for port 55c9d2e9-7fb5-4e9d-8071-fe23efeedb7c {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1400.359235] env[62510]: DEBUG oslo_vmware.api [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52e7f313-d9d1-b3d7-316a-e8ffa1e55070, 'name': SearchDatastore_Task, 'duration_secs': 0.007967} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.359803] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1400.360061] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1400.360274] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1400.501938] env[62510]: DEBUG oslo_vmware.api [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768191, 'name': ExtendVirtualDisk_Task, 'duration_secs': 1.721533} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.502355] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1400.503603] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d933d5f-77e3-44b1-9231-a706379c93d9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.526623] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Reconfiguring VM instance instance-00000003 to attach disk [datastore1] 12768001-6ed0-47be-8f20-c59ee82b842a/12768001-6ed0-47be-8f20-c59ee82b842a.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1400.527142] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ddd12339-30d6-475a-ad84-eaaf2c045d43 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.548932] env[62510]: DEBUG oslo_vmware.api [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Waiting for the task: (returnval){ [ 1400.548932] env[62510]: value = "task-1768192" [ 1400.548932] env[62510]: _type = "Task" [ 1400.548932] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.557781] env[62510]: DEBUG oslo_vmware.api [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768192, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.597600] env[62510]: DEBUG nova.scheduler.client.report [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 167, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1400.676694] env[62510]: DEBUG nova.compute.manager [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Start spawning the instance on the hypervisor. 
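The "Reconfiguring VM instance ... to attach disk" record above corresponds to a VirtualMachine.ReconfigVM_Task that adds the copied VMDK to the VM. A sketch with pyVmomi instead of the suds client in the log; the controller key (1000, the first SCSI controller) and unit number are assumptions, 'vm' is an already-resolved vim.VirtualMachine, and the VMDK path is the one from the log.

    from pyVmomi import vim


    def attach_existing_vmdk(vm, vmdk_path, controller_key=1000, unit_number=0):
        disk = vim.vm.device.VirtualDisk()
        disk.controllerKey = controller_key
        disk.unitNumber = unit_number
        disk.backing = vim.vm.device.VirtualDisk.FlatVer2BackingInfo(
            fileName=vmdk_path, diskMode='persistent')

        change = vim.vm.device.VirtualDeviceSpec(
            operation=vim.vm.device.VirtualDeviceSpec.Operation.add,
            device=disk)
        spec = vim.vm.ConfigSpec(deviceChange=[change])
        # Returns the ReconfigVM_Task; poll it as sketched earlier.
        return vm.ReconfigVM_Task(spec=spec)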
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1400.720036] env[62510]: DEBUG nova.virt.hardware [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1400.720319] env[62510]: DEBUG nova.virt.hardware [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1400.720565] env[62510]: DEBUG nova.virt.hardware [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1400.720695] env[62510]: DEBUG nova.virt.hardware [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1400.720848] env[62510]: DEBUG nova.virt.hardware [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1400.721013] env[62510]: DEBUG nova.virt.hardware [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1400.721296] env[62510]: DEBUG nova.virt.hardware [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1400.721480] env[62510]: DEBUG nova.virt.hardware [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1400.721630] env[62510]: DEBUG nova.virt.hardware [None 
req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1400.721798] env[62510]: DEBUG nova.virt.hardware [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1400.721981] env[62510]: DEBUG nova.virt.hardware [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1400.722331] env[62510]: DEBUG oslo_vmware.api [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5235bff0-7051-d7ef-6a4f-2379375f1bf3, 'name': SearchDatastore_Task, 'duration_secs': 0.009448} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.723106] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caee12cd-68b8-4174-a715-0deaf81c081e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.728869] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91600c16-a7d4-49b8-84f1-36a1f1782f52 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.737298] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0648bca-1b22-427b-8e7f-4cd9f41e951a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.741319] env[62510]: DEBUG oslo_vmware.api [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Waiting for the task: (returnval){ [ 1400.741319] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52ea948d-5736-a04a-8526-54a549a51e5d" [ 1400.741319] env[62510]: _type = "Task" [ 1400.741319] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.760235] env[62510]: DEBUG oslo_vmware.api [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52ea948d-5736-a04a-8526-54a549a51e5d, 'name': SearchDatastore_Task, 'duration_secs': 0.010579} completed successfully. 
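The nova.virt.hardware records above walk through the CPU topology search: with one vCPU and effectively unlimited socket/core/thread limits, the only candidate is 1:1:1. A pure-Python sketch of that enumeration step (Nova's real implementation adds preference sorting and NUMA handling on top of this):

    import collections
    import itertools

    VirtCPUTopology = collections.namedtuple('VirtCPUTopology',
                                             'sockets cores threads')


    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Enumerate every (sockets, cores, threads) triple whose product equals
        # the flavor's vCPU count and that fits within the limits.
        for s, c, t in itertools.product(
                range(1, min(vcpus, max_sockets) + 1),
                range(1, min(vcpus, max_cores) + 1),
                range(1, min(vcpus, max_threads) + 1)):
            if s * c * t == vcpus:
                yield VirtCPUTopology(s, c, t)


    print(list(possible_topologies(1)))   # the single 1:1:1 topology, as logged
    print(list(possible_topologies(4)))   # every factorization of 4 vCPUs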
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.760822] env[62510]: DEBUG oslo_concurrency.lockutils [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1400.761094] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 3266d254-4a75-4fd3-b4e7-ebeb86467cbe/3266d254-4a75-4fd3-b4e7-ebeb86467cbe.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1400.761428] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1400.761716] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1400.761803] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a953a13c-3357-4bcd-90fd-a4b6330f7548 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.770229] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c1d4b803-c1c8-4866-a7c4-acae31be2860 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.772383] env[62510]: DEBUG oslo_vmware.api [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768187, 'name': ReconfigVM_Task, 'duration_secs': 2.032479} completed successfully. 
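The "Copying Virtual Disk ..." record above clones the cached image VMDK from devstack-image-cache_base into the new instance's directory via VirtualDiskManager.CopyVirtualDisk_Task. A sketch with pyVmomi; 'si' is an existing ServiceInstance, 'dc' the datacenter object, and the datastore paths are copied from the log.

    SRC = ("[datastore1] devstack-image-cache_base/"
           "645af513-c243-4722-b631-714f21477ae6/"
           "645af513-c243-4722-b631-714f21477ae6.vmdk")
    DST = ("[datastore1] 3266d254-4a75-4fd3-b4e7-ebeb86467cbe/"
           "3266d254-4a75-4fd3-b4e7-ebeb86467cbe.vmdk")


    def copy_cached_image(si, dc):
        vdm = si.content.virtualDiskManager
        # Same source/destination copy the log reports; force=False keeps an
        # existing destination file from being overwritten.
        return vdm.CopyVirtualDisk_Task(sourceName=SRC, sourceDatacenter=dc,
                                        destName=DST, destDatacenter=dc,
                                        force=False)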
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.773018] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Reconfigured VM instance instance-00000002 to attach disk [datastore1] 5588650b-c450-489a-a456-3b580a5b9114/5588650b-c450-489a-a456-3b580a5b9114.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1400.775039] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-58dcd672-2aed-42a0-b327-36010ac6e0fa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.778123] env[62510]: DEBUG oslo_vmware.api [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Waiting for the task: (returnval){ [ 1400.778123] env[62510]: value = "task-1768193" [ 1400.778123] env[62510]: _type = "Task" [ 1400.778123] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.783209] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1400.783209] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1400.783900] env[62510]: DEBUG oslo_vmware.api [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1400.783900] env[62510]: value = "task-1768194" [ 1400.783900] env[62510]: _type = "Task" [ 1400.783900] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.784676] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0854dfc-e548-43f0-a891-82d2720caae0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.793651] env[62510]: DEBUG oslo_vmware.api [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Task: {'id': task-1768193, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.802611] env[62510]: DEBUG oslo_vmware.api [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768194, 'name': Rename_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.804150] env[62510]: DEBUG oslo_vmware.api [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Waiting for the task: (returnval){ [ 1400.804150] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5297ea6a-31df-6c37-9040-82b7e9dc3cca" [ 1400.804150] env[62510]: _type = "Task" [ 1400.804150] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.813594] env[62510]: DEBUG oslo_vmware.api [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5297ea6a-31df-6c37-9040-82b7e9dc3cca, 'name': SearchDatastore_Task, 'duration_secs': 0.011498} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.814265] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e6d5524-9348-4ec1-83b4-f37f8f5e16b4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.820476] env[62510]: DEBUG oslo_vmware.api [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Waiting for the task: (returnval){ [ 1400.820476] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52ea1459-44a4-a048-d354-eff41e935cb5" [ 1400.820476] env[62510]: _type = "Task" [ 1400.820476] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.831177] env[62510]: DEBUG oslo_vmware.api [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52ea1459-44a4-a048-d354-eff41e935cb5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.868445] env[62510]: DEBUG oslo_concurrency.lockutils [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1401.059431] env[62510]: DEBUG oslo_vmware.api [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768192, 'name': ReconfigVM_Task, 'duration_secs': 0.325272} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.059805] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Reconfigured VM instance instance-00000003 to attach disk [datastore1] 12768001-6ed0-47be-8f20-c59ee82b842a/12768001-6ed0-47be-8f20-c59ee82b842a.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1401.060602] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-46c41e62-f9ec-4587-9fee-286df3791630 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.067431] env[62510]: DEBUG oslo_vmware.api [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Waiting for the task: (returnval){ [ 1401.067431] env[62510]: value = "task-1768195" [ 1401.067431] env[62510]: _type = "Task" [ 1401.067431] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.077486] env[62510]: DEBUG oslo_vmware.api [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768195, 'name': Rename_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.109171] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.461s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1401.109171] env[62510]: DEBUG nova.compute.manager [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1401.113110] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.531s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1401.116190] env[62510]: INFO nova.compute.claims [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1401.292730] env[62510]: DEBUG oslo_vmware.api [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Task: {'id': task-1768193, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.305268] env[62510]: DEBUG oslo_vmware.api [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768194, 'name': Rename_Task, 'duration_secs': 0.191858} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.306025] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1401.306096] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-58e5c156-aa39-42fb-b8e3-352e82807ed4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.315546] env[62510]: DEBUG oslo_vmware.api [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1401.315546] env[62510]: value = "task-1768196" [ 1401.315546] env[62510]: _type = "Task" [ 1401.315546] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.342555] env[62510]: DEBUG oslo_vmware.api [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768196, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.349015] env[62510]: DEBUG oslo_vmware.api [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52ea1459-44a4-a048-d354-eff41e935cb5, 'name': SearchDatastore_Task, 'duration_secs': 0.013657} completed successfully. 
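Once Rename_Task finishes, the driver powers the VM on and polls the resulting task (progress 33% -> 66% -> done in the records above and below). A short pyVmomi sketch of that step; 'vm' is a vim.VirtualMachine already looked up by the caller.

    from pyVmomi import vim


    def power_on(vm):
        # Skip the call if the VM is already running; otherwise return the
        # PowerOnVM_Task for the caller to poll.
        if vm.runtime.powerState != vim.VirtualMachinePowerState.poweredOn:
            return vm.PowerOnVM_Task()
        return None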
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.349432] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1401.349802] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 731e7110-9709-4c4e-96d2-00e21e67c6e3/731e7110-9709-4c4e-96d2-00e21e67c6e3.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1401.350781] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-564cc508-2aaa-4bdc-b68d-afefb10b9683 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.360059] env[62510]: DEBUG oslo_vmware.api [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Waiting for the task: (returnval){ [ 1401.360059] env[62510]: value = "task-1768197" [ 1401.360059] env[62510]: _type = "Task" [ 1401.360059] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.377197] env[62510]: DEBUG oslo_vmware.api [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Task: {'id': task-1768197, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.579644] env[62510]: DEBUG oslo_vmware.api [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768195, 'name': Rename_Task, 'duration_secs': 0.178887} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.580856] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1401.580856] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5f3ad6e6-c74e-4538-9207-c51a21bb7a29 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.588332] env[62510]: DEBUG oslo_vmware.api [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Waiting for the task: (returnval){ [ 1401.588332] env[62510]: value = "task-1768198" [ 1401.588332] env[62510]: _type = "Task" [ 1401.588332] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.603695] env[62510]: DEBUG oslo_vmware.api [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768198, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.624158] env[62510]: DEBUG nova.compute.utils [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1401.628514] env[62510]: DEBUG nova.compute.manager [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1401.628514] env[62510]: DEBUG nova.network.neutron [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1401.640604] env[62510]: DEBUG nova.network.neutron [req-30429799-31b6-4ad0-af3f-9a7c2f4ba469 req-189361c7-fb88-4dc3-80d0-f82b9d3552f6 service nova] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Updated VIF entry in instance network info cache for port 55c9d2e9-7fb5-4e9d-8071-fe23efeedb7c. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1401.640847] env[62510]: DEBUG nova.network.neutron [req-30429799-31b6-4ad0-af3f-9a7c2f4ba469 req-189361c7-fb88-4dc3-80d0-f82b9d3552f6 service nova] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Updating instance_info_cache with network_info: [{"id": "55c9d2e9-7fb5-4e9d-8071-fe23efeedb7c", "address": "fa:16:3e:4f:f8:90", "network": {"id": "30c7a896-1d3b-4c07-900b-66caaf19e751", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-204185731-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97e5d626c8c04377b1653057d6fb63e8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "517421c3-bea0-419c-ab0b-987815e5d160", "external-id": "nsx-vlan-transportzone-68", "segmentation_id": 68, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55c9d2e9-7f", "ovs_interfaceid": "55c9d2e9-7fb5-4e9d-8071-fe23efeedb7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1401.655021] env[62510]: DEBUG nova.network.neutron [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Successfully updated port: e83f36f6-e38c-49b4-b419-59f9030e6005 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1401.786471] env[62510]: DEBUG nova.policy [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dcc6b50d1d8f49a9b9017b9532696221', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c852b1f21b054fd0b6961685dcf528f5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1401.793345] env[62510]: DEBUG oslo_vmware.api [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Task: {'id': task-1768193, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.684187} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.793707] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 3266d254-4a75-4fd3-b4e7-ebeb86467cbe/3266d254-4a75-4fd3-b4e7-ebeb86467cbe.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1401.793891] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1401.794173] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-44301d9b-9329-4ecd-9e03-221e986b982d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.802211] env[62510]: DEBUG oslo_vmware.api [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Waiting for the task: (returnval){ [ 1401.802211] env[62510]: value = "task-1768199" [ 1401.802211] env[62510]: _type = "Task" [ 1401.802211] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.812206] env[62510]: DEBUG oslo_vmware.api [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Task: {'id': task-1768199, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.832584] env[62510]: DEBUG oslo_vmware.api [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768196, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.880573] env[62510]: DEBUG oslo_vmware.api [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Task: {'id': task-1768197, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.109928] env[62510]: DEBUG oslo_vmware.api [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768198, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.128790] env[62510]: DEBUG nova.compute.manager [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Start building block device mappings for instance. 
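The "Extending root virtual disk to 1048576" record above grows the freshly copied root VMDK to the flavor's 1 GiB (1,048,576 KB) with VirtualDiskManager.ExtendVirtualDisk_Task. A pyVmomi sketch; 'si' and 'dc' are an existing ServiceInstance and datacenter object, and the path argument would be the instance's root VMDK from the log.

    def extend_root_disk(si, dc, vmdk_path, new_capacity_kb=1048576):
        vdm = si.content.virtualDiskManager
        # eagerZero=False keeps the extension lazy-zeroed, matching a sparse
        # root disk; poll the returned task as sketched earlier.
        return vdm.ExtendVirtualDisk_Task(name=vmdk_path, datacenter=dc,
                                          newCapacityKb=new_capacity_kb,
                                          eagerZero=False)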
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1402.146461] env[62510]: DEBUG oslo_concurrency.lockutils [req-30429799-31b6-4ad0-af3f-9a7c2f4ba469 req-189361c7-fb88-4dc3-80d0-f82b9d3552f6 service nova] Releasing lock "refresh_cache-3266d254-4a75-4fd3-b4e7-ebeb86467cbe" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1402.171275] env[62510]: DEBUG nova.compute.manager [req-fa15d00e-120b-401c-a398-d09ba67d8f08 req-63425def-3f28-48c8-9770-db4ac8eabe98 service nova] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Received event network-vif-plugged-ca5eb991-9338-4e3a-8dcc-322896c420df {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1402.171275] env[62510]: DEBUG oslo_concurrency.lockutils [req-fa15d00e-120b-401c-a398-d09ba67d8f08 req-63425def-3f28-48c8-9770-db4ac8eabe98 service nova] Acquiring lock "eb840df4-edc1-44cb-84c9-f31b7b56b6bd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1402.171275] env[62510]: DEBUG oslo_concurrency.lockutils [req-fa15d00e-120b-401c-a398-d09ba67d8f08 req-63425def-3f28-48c8-9770-db4ac8eabe98 service nova] Lock "eb840df4-edc1-44cb-84c9-f31b7b56b6bd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1402.171275] env[62510]: DEBUG oslo_concurrency.lockutils [req-fa15d00e-120b-401c-a398-d09ba67d8f08 req-63425def-3f28-48c8-9770-db4ac8eabe98 service nova] Lock "eb840df4-edc1-44cb-84c9-f31b7b56b6bd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1402.171275] env[62510]: DEBUG nova.compute.manager [req-fa15d00e-120b-401c-a398-d09ba67d8f08 req-63425def-3f28-48c8-9770-db4ac8eabe98 service nova] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] No waiting events found dispatching network-vif-plugged-ca5eb991-9338-4e3a-8dcc-322896c420df {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1402.171574] env[62510]: WARNING nova.compute.manager [req-fa15d00e-120b-401c-a398-d09ba67d8f08 req-63425def-3f28-48c8-9770-db4ac8eabe98 service nova] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Received unexpected event network-vif-plugged-ca5eb991-9338-4e3a-8dcc-322896c420df for instance with vm_state building and task_state spawning. [ 1402.171574] env[62510]: DEBUG nova.compute.manager [req-fa15d00e-120b-401c-a398-d09ba67d8f08 req-63425def-3f28-48c8-9770-db4ac8eabe98 service nova] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Received event network-changed-ca5eb991-9338-4e3a-8dcc-322896c420df {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1402.171574] env[62510]: DEBUG nova.compute.manager [req-fa15d00e-120b-401c-a398-d09ba67d8f08 req-63425def-3f28-48c8-9770-db4ac8eabe98 service nova] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Refreshing instance network info cache due to event network-changed-ca5eb991-9338-4e3a-8dcc-322896c420df. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1402.171574] env[62510]: DEBUG oslo_concurrency.lockutils [req-fa15d00e-120b-401c-a398-d09ba67d8f08 req-63425def-3f28-48c8-9770-db4ac8eabe98 service nova] Acquiring lock "refresh_cache-eb840df4-edc1-44cb-84c9-f31b7b56b6bd" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1402.172331] env[62510]: DEBUG oslo_concurrency.lockutils [req-fa15d00e-120b-401c-a398-d09ba67d8f08 req-63425def-3f28-48c8-9770-db4ac8eabe98 service nova] Acquired lock "refresh_cache-eb840df4-edc1-44cb-84c9-f31b7b56b6bd" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1402.172769] env[62510]: DEBUG nova.network.neutron [req-fa15d00e-120b-401c-a398-d09ba67d8f08 req-63425def-3f28-48c8-9770-db4ac8eabe98 service nova] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Refreshing network info cache for port ca5eb991-9338-4e3a-8dcc-322896c420df {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1402.320261] env[62510]: DEBUG oslo_vmware.api [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Task: {'id': task-1768199, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.123225} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.323355] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1402.327155] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bdc5277-efcf-4a8c-8f41-d06a449c15b5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.344802] env[62510]: DEBUG oslo_vmware.api [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768196, 'name': PowerOnVM_Task, 'duration_secs': 1.010305} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.354457] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1402.354590] env[62510]: INFO nova.compute.manager [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Took 13.71 seconds to spawn the instance on the hypervisor. 
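The network-vif-plugged / network-changed records above show the external-event handshake: Neutron notifies Nova when a port is wired up, and the compute manager pops a registered waiter keyed by event name and port id. When the notification arrives before anyone has registered (as here, while the instance is still building), the pop finds nothing and the event is logged as unexpected. An illustrative sketch of that pattern, not Nova's code:

    import threading

    _waiters = {}            # (event_name, tag) -> threading.Event
    _lock = threading.Lock()


    def prepare_for_event(name, tag):
        ev = threading.Event()
        with _lock:
            _waiters[(name, tag)] = ev
        return ev


    def dispatch_event(name, tag):
        with _lock:
            ev = _waiters.pop((name, tag), None)
        if ev is None:
            print("No waiting events found dispatching %s-%s" % (name, tag))
            return
        ev.set()


    # Spawn-side usage: register first, plug the VIF, then wait with a deadline.
    waiter = prepare_for_event("network-vif-plugged",
                               "ca5eb991-9338-4e3a-8dcc-322896c420df")
    dispatch_event("network-vif-plugged",
                   "ca5eb991-9338-4e3a-8dcc-322896c420df")
    waiter.wait(timeout=300)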
[ 1402.354811] env[62510]: DEBUG nova.compute.manager [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1402.366933] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Reconfiguring VM instance instance-00000004 to attach disk [datastore1] 3266d254-4a75-4fd3-b4e7-ebeb86467cbe/3266d254-4a75-4fd3-b4e7-ebeb86467cbe.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1402.371441] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-001abea6-5c5e-4903-abe6-eec68391e0a6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.374208] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5894d3ba-5be2-4bd0-b996-e918f781b05b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.390605] env[62510]: DEBUG nova.network.neutron [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Successfully created port: 7a0f3d5f-4630-470a-9084-d1e05b4f306e {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1402.401432] env[62510]: DEBUG nova.network.neutron [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Successfully updated port: c9624a79-da6f-44aa-87fe-e5872f2e1d7d {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1402.416715] env[62510]: DEBUG oslo_vmware.api [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Waiting for the task: (returnval){ [ 1402.416715] env[62510]: value = "task-1768200" [ 1402.416715] env[62510]: _type = "Task" [ 1402.416715] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.416715] env[62510]: DEBUG oslo_vmware.api [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Task: {'id': task-1768197, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.614707} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.417459] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 731e7110-9709-4c4e-96d2-00e21e67c6e3/731e7110-9709-4c4e-96d2-00e21e67c6e3.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1402.418373] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1402.421928] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ab23e7ac-2821-461f-82b1-e327b63a7832 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.438224] env[62510]: DEBUG oslo_vmware.api [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Task: {'id': task-1768200, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.439956] env[62510]: DEBUG oslo_vmware.api [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Waiting for the task: (returnval){ [ 1402.439956] env[62510]: value = "task-1768201" [ 1402.439956] env[62510]: _type = "Task" [ 1402.439956] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.454809] env[62510]: DEBUG oslo_vmware.api [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Task: {'id': task-1768201, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.604532] env[62510]: DEBUG oslo_vmware.api [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768198, 'name': PowerOnVM_Task, 'duration_secs': 0.596242} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.604927] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1402.605773] env[62510]: INFO nova.compute.manager [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Took 11.71 seconds to spawn the instance on the hypervisor. 
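Several of the surrounding entries follow the same vCenter task lifecycle: a "Waiting for the task: (returnval){ value = task-... }" record, periodic "progress is N%." polls, and a final entry marking the task completed successfully with a duration_secs value. A minimal sketch of that poll-until-done loop; get_task_info is a hypothetical stand-in callable, not the oslo.vmware session API.

# Hypothetical poll loop mirroring the wait_for_task / _poll_task
# entries above; get_task_info() is a stand-in, not oslo.vmware.
import time

def wait_for_task(get_task_info, task_id, interval=0.5, timeout=300):
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_id)  # e.g. {'state': 'running', 'progress': 14}
        if info["state"] == "success":
            return info                # logged as "completed successfully"
        if info["state"] == "error":
            raise RuntimeError(info.get("error", "task failed"))
        # Corresponds to the "Task: {...} progress is N%." entries.
        print(f"Task {task_id} progress is {info.get('progress', 0)}%.")
        time.sleep(interval)
    raise TimeoutError(f"Task {task_id} did not complete in {timeout}s")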
[ 1402.606043] env[62510]: DEBUG nova.compute.manager [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1402.609965] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e4c5125-f256-4229-9665-8b292bf47cc8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.631203] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98b7a529-892e-40d2-a013-759a27593fbf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.644143] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a31b442c-9f80-49da-9064-88532bfcbd5c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.697951] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51dc42b1-14d7-4d74-943c-8f6d1f134f19 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.712996] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74e46830-f433-4ea4-82b0-f603224655be {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.737431] env[62510]: DEBUG nova.compute.provider_tree [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1402.761769] env[62510]: DEBUG nova.network.neutron [req-fa15d00e-120b-401c-a398-d09ba67d8f08 req-63425def-3f28-48c8-9770-db4ac8eabe98 service nova] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Instance cache missing network info. 
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1402.875154] env[62510]: DEBUG nova.network.neutron [req-fa15d00e-120b-401c-a398-d09ba67d8f08 req-63425def-3f28-48c8-9770-db4ac8eabe98 service nova] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1402.901199] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2c798ac7-77b8-4e3f-86fe-9cd211482b07 tempest-DeleteServersAdminTestJSON-1087720554 tempest-DeleteServersAdminTestJSON-1087720554-project-admin] Acquiring lock "0604d37b-38c5-4510-894e-b26fd44e17c5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1402.901614] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2c798ac7-77b8-4e3f-86fe-9cd211482b07 tempest-DeleteServersAdminTestJSON-1087720554 tempest-DeleteServersAdminTestJSON-1087720554-project-admin] Lock "0604d37b-38c5-4510-894e-b26fd44e17c5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1402.901759] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2c798ac7-77b8-4e3f-86fe-9cd211482b07 tempest-DeleteServersAdminTestJSON-1087720554 tempest-DeleteServersAdminTestJSON-1087720554-project-admin] Acquiring lock "0604d37b-38c5-4510-894e-b26fd44e17c5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1402.902014] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2c798ac7-77b8-4e3f-86fe-9cd211482b07 tempest-DeleteServersAdminTestJSON-1087720554 tempest-DeleteServersAdminTestJSON-1087720554-project-admin] Lock "0604d37b-38c5-4510-894e-b26fd44e17c5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1402.902215] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2c798ac7-77b8-4e3f-86fe-9cd211482b07 tempest-DeleteServersAdminTestJSON-1087720554 tempest-DeleteServersAdminTestJSON-1087720554-project-admin] Lock "0604d37b-38c5-4510-894e-b26fd44e17c5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1402.904265] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Acquiring lock "refresh_cache-585784c5-b56a-435d-8b22-53bc5cb39b25" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1402.904391] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Acquired lock "refresh_cache-585784c5-b56a-435d-8b22-53bc5cb39b25" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1402.904541] 
env[62510]: DEBUG nova.network.neutron [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1402.907582] env[62510]: INFO nova.compute.manager [None req-2c798ac7-77b8-4e3f-86fe-9cd211482b07 tempest-DeleteServersAdminTestJSON-1087720554 tempest-DeleteServersAdminTestJSON-1087720554-project-admin] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Terminating instance [ 1402.924411] env[62510]: INFO nova.compute.manager [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Took 19.03 seconds to build instance. [ 1402.932858] env[62510]: DEBUG oslo_vmware.api [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Task: {'id': task-1768200, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.956683] env[62510]: DEBUG oslo_vmware.api [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Task: {'id': task-1768201, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.124851} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.956985] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1402.958289] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dcf8085-a418-4853-b508-04365fad795b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.982484] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Reconfiguring VM instance instance-00000006 to attach disk [datastore1] 731e7110-9709-4c4e-96d2-00e21e67c6e3/731e7110-9709-4c4e-96d2-00e21e67c6e3.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1402.982790] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8cd357ca-59a4-409d-a0df-f3e8e546caf5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.006431] env[62510]: DEBUG oslo_vmware.api [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Waiting for the task: (returnval){ [ 1403.006431] env[62510]: value = "task-1768202" [ 1403.006431] env[62510]: _type = "Task" [ 1403.006431] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.017031] env[62510]: DEBUG oslo_vmware.api [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Task: {'id': task-1768202, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.130752] env[62510]: INFO nova.compute.manager [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Took 18.37 seconds to build instance. [ 1403.139701] env[62510]: DEBUG nova.compute.manager [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1403.177703] env[62510]: DEBUG nova.virt.hardware [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1403.177951] env[62510]: DEBUG nova.virt.hardware [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1403.178405] env[62510]: DEBUG nova.virt.hardware [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1403.178666] env[62510]: DEBUG nova.virt.hardware [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1403.178829] env[62510]: DEBUG nova.virt.hardware [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1403.178982] env[62510]: DEBUG nova.virt.hardware [None 
req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1403.179209] env[62510]: DEBUG nova.virt.hardware [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1403.179437] env[62510]: DEBUG nova.virt.hardware [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1403.179811] env[62510]: DEBUG nova.virt.hardware [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1403.179811] env[62510]: DEBUG nova.virt.hardware [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1403.179899] env[62510]: DEBUG nova.virt.hardware [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1403.181101] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c8e7bed-6e64-4f9c-9b4f-459a436d5971 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.191781] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3de6dc6-5842-4082-9940-78d2d65858af {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.261208] env[62510]: ERROR nova.scheduler.client.report [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] [req-3209b557-af8c-4d70-b207-06b3a5e5a92c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c3653102-341b-4ed1-8b1f-1abaf8aa3e56. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-3209b557-af8c-4d70-b207-06b3a5e5a92c"}]} [ 1403.283638] env[62510]: DEBUG nova.scheduler.client.report [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Refreshing inventories for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1403.301537] env[62510]: DEBUG nova.scheduler.client.report [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Updating ProviderTree inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 167, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1403.301826] env[62510]: DEBUG nova.compute.provider_tree [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 167, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1403.316931] env[62510]: DEBUG nova.scheduler.client.report [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Refreshing aggregate associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, aggregates: None {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1403.340690] env[62510]: DEBUG nova.scheduler.client.report [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Refreshing trait associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1403.378560] env[62510]: DEBUG oslo_concurrency.lockutils [req-fa15d00e-120b-401c-a398-d09ba67d8f08 req-63425def-3f28-48c8-9770-db4ac8eabe98 service nova] Releasing lock "refresh_cache-eb840df4-edc1-44cb-84c9-f31b7b56b6bd" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1403.414800] env[62510]: DEBUG nova.compute.manager [None req-2c798ac7-77b8-4e3f-86fe-9cd211482b07 
tempest-DeleteServersAdminTestJSON-1087720554 tempest-DeleteServersAdminTestJSON-1087720554-project-admin] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1403.415943] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2c798ac7-77b8-4e3f-86fe-9cd211482b07 tempest-DeleteServersAdminTestJSON-1087720554 tempest-DeleteServersAdminTestJSON-1087720554-project-admin] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1403.417194] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-531000e5-5efd-44b2-a6fb-01ea3a85841a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.435044] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6db4d386-964d-4199-b8cc-d78539a32418 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "5588650b-c450-489a-a456-3b580a5b9114" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.543s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1403.436395] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c798ac7-77b8-4e3f-86fe-9cd211482b07 tempest-DeleteServersAdminTestJSON-1087720554 tempest-DeleteServersAdminTestJSON-1087720554-project-admin] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1403.441501] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7e4ae53b-31c1-40c9-8e0d-e2819e1fa240 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.443900] env[62510]: DEBUG oslo_vmware.api [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Task: {'id': task-1768200, 'name': ReconfigVM_Task, 'duration_secs': 0.652536} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.443983] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Reconfigured VM instance instance-00000004 to attach disk [datastore1] 3266d254-4a75-4fd3-b4e7-ebeb86467cbe/3266d254-4a75-4fd3-b4e7-ebeb86467cbe.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1403.444943] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-62ba95d1-88b9-40b2-946a-6e905edeb2d5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.450929] env[62510]: DEBUG oslo_vmware.api [None req-2c798ac7-77b8-4e3f-86fe-9cd211482b07 tempest-DeleteServersAdminTestJSON-1087720554 tempest-DeleteServersAdminTestJSON-1087720554-project-admin] Waiting for the task: (returnval){ [ 1403.450929] env[62510]: value = "task-1768203" [ 1403.450929] env[62510]: _type = "Task" [ 1403.450929] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.460970] env[62510]: DEBUG oslo_vmware.api [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Waiting for the task: (returnval){ [ 1403.460970] env[62510]: value = "task-1768204" [ 1403.460970] env[62510]: _type = "Task" [ 1403.460970] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.462710] env[62510]: DEBUG nova.network.neutron [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1403.479980] env[62510]: DEBUG oslo_vmware.api [None req-2c798ac7-77b8-4e3f-86fe-9cd211482b07 tempest-DeleteServersAdminTestJSON-1087720554 tempest-DeleteServersAdminTestJSON-1087720554-project-admin] Task: {'id': task-1768203, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.489071] env[62510]: DEBUG oslo_vmware.api [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Task: {'id': task-1768204, 'name': Rename_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.519522] env[62510]: DEBUG oslo_vmware.api [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Task: {'id': task-1768202, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.632564] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5af6f516-e584-4b94-8129-abe0b583e646 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Lock "12768001-6ed0-47be-8f20-c59ee82b842a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.876s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1403.723823] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d238155-29a3-4309-905c-3e45376779a2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.732358] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-965d398e-20c6-409f-8a43-78e5f5ba6b5d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.769642] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7c33a67-1108-4191-a4c5-d54ecb2256bd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.778248] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc9c7724-58de-4fd8-9a69-09854ad4aee3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.796699] env[62510]: DEBUG nova.compute.provider_tree [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1403.822289] env[62510]: DEBUG nova.network.neutron [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Updating instance_info_cache with network_info: [{"id": "c9624a79-da6f-44aa-87fe-e5872f2e1d7d", "address": "fa:16:3e:fc:31:a0", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.155", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 
501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9624a79-da", "ovs_interfaceid": "c9624a79-da6f-44aa-87fe-e5872f2e1d7d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1403.933926] env[62510]: DEBUG nova.compute.manager [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1403.965850] env[62510]: DEBUG oslo_vmware.api [None req-2c798ac7-77b8-4e3f-86fe-9cd211482b07 tempest-DeleteServersAdminTestJSON-1087720554 tempest-DeleteServersAdminTestJSON-1087720554-project-admin] Task: {'id': task-1768203, 'name': PowerOffVM_Task, 'duration_secs': 0.217139} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.970575] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c798ac7-77b8-4e3f-86fe-9cd211482b07 tempest-DeleteServersAdminTestJSON-1087720554 tempest-DeleteServersAdminTestJSON-1087720554-project-admin] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1403.971600] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2c798ac7-77b8-4e3f-86fe-9cd211482b07 tempest-DeleteServersAdminTestJSON-1087720554 tempest-DeleteServersAdminTestJSON-1087720554-project-admin] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1403.972223] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a6614e2e-34b0-4c50-8521-3b33a37e1fc0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.981325] env[62510]: DEBUG oslo_vmware.api [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Task: {'id': task-1768204, 'name': Rename_Task, 'duration_secs': 0.267824} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.981325] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1403.981583] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-489df8b2-320e-4409-a5bd-bb8fff904bb5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.990433] env[62510]: DEBUG oslo_vmware.api [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Waiting for the task: (returnval){ [ 1403.990433] env[62510]: value = "task-1768206" [ 1403.990433] env[62510]: _type = "Task" [ 1403.990433] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.003812] env[62510]: DEBUG oslo_vmware.api [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Task: {'id': task-1768206, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.019578] env[62510]: DEBUG oslo_vmware.api [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Task: {'id': task-1768202, 'name': ReconfigVM_Task, 'duration_secs': 0.859128} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.019578] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Reconfigured VM instance instance-00000006 to attach disk [datastore1] 731e7110-9709-4c4e-96d2-00e21e67c6e3/731e7110-9709-4c4e-96d2-00e21e67c6e3.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1404.020358] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0f60349b-05a6-4eb7-adfa-56f7fd2b8f11 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.027987] env[62510]: DEBUG oslo_vmware.api [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Waiting for the task: (returnval){ [ 1404.027987] env[62510]: value = "task-1768207" [ 1404.027987] env[62510]: _type = "Task" [ 1404.027987] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.038323] env[62510]: DEBUG oslo_vmware.api [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Task: {'id': task-1768207, 'name': Rename_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.139115] env[62510]: DEBUG nova.compute.manager [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1404.328883] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Releasing lock "refresh_cache-585784c5-b56a-435d-8b22-53bc5cb39b25" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1404.329244] env[62510]: DEBUG nova.compute.manager [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Instance network_info: |[{"id": "c9624a79-da6f-44aa-87fe-e5872f2e1d7d", "address": "fa:16:3e:fc:31:a0", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.155", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9624a79-da", "ovs_interfaceid": "c9624a79-da6f-44aa-87fe-e5872f2e1d7d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1404.329926] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fc:31:a0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '28d04eee-6dbb-491a-a999-b659c799679d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c9624a79-da6f-44aa-87fe-e5872f2e1d7d', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1404.339987] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Creating folder: Project (2f756c440e2b4614ac89e1d5695cab2b). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1404.339987] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-481b7315-00ec-43b0-9f80-be7e3a560b0c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.357195] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Created folder: Project (2f756c440e2b4614ac89e1d5695cab2b) in parent group-v367197. 
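While the VM build below proceeds, the placement exchange that failed at 1403.261 with a 409 placement.concurrent_update resolves at 1404.414: the report client refreshes inventories, aggregates, and traits, retries the update, and the provider generation moves from 23 to 24. A minimal sketch of that generation-aware retry; placement_get and placement_put are hypothetical stand-ins, not the real nova.scheduler.client.report API.

# Hypothetical generation-aware retry for a placement-style inventory PUT;
# placement_get/placement_put are stand-ins, not the real report client.
def update_inventory(placement_get, placement_put, provider_uuid,
                     inventory, retries=3):
    for _ in range(retries):
        current = placement_get(f"/resource_providers/{provider_uuid}")
        payload = {
            "resource_provider_generation": current["generation"],
            "inventories": inventory,
        }
        resp = placement_put(
            f"/resource_providers/{provider_uuid}/inventories", payload)
        if resp.status_code == 200:
            return True                # generation bumped server-side
        if resp.status_code == 409:
            # Another writer updated the provider first ("concurrent_update");
            # refresh the generation and try again, as in the log above.
            continue
        resp.raise_for_status()
    return False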
[ 1404.357549] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Creating folder: Instances. Parent ref: group-v367213. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1404.357755] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dd3d4b91-9312-45b0-9920-7235c32dc999 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.374891] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Created folder: Instances in parent group-v367213. [ 1404.375190] env[62510]: DEBUG oslo.service.loopingcall [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1404.375413] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1404.375640] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-55672d4c-d223-4c3e-8873-a01c91c87500 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.398274] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1404.398274] env[62510]: value = "task-1768210" [ 1404.398274] env[62510]: _type = "Task" [ 1404.398274] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.408248] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768210, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.414047] env[62510]: DEBUG nova.scheduler.client.report [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Updated inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with generation 23 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1404.414378] env[62510]: DEBUG nova.compute.provider_tree [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Updating resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 generation from 23 to 24 during operation: update_inventory {{(pid=62510) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1404.414573] env[62510]: DEBUG nova.compute.provider_tree [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1404.469369] env[62510]: DEBUG oslo_concurrency.lockutils [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1404.509510] env[62510]: DEBUG oslo_vmware.api [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Task: {'id': task-1768206, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.544843] env[62510]: DEBUG oslo_vmware.api [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Task: {'id': task-1768207, 'name': Rename_Task, 'duration_secs': 0.2973} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.544843] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1404.546306] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-854e0773-05ab-452c-af4f-488f891601cf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.557055] env[62510]: DEBUG oslo_vmware.api [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Waiting for the task: (returnval){ [ 1404.557055] env[62510]: value = "task-1768211" [ 1404.557055] env[62510]: _type = "Task" [ 1404.557055] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.569143] env[62510]: DEBUG oslo_vmware.api [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Task: {'id': task-1768211, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.576027] env[62510]: DEBUG nova.network.neutron [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Successfully updated port: 7a0f3d5f-4630-470a-9084-d1e05b4f306e {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1404.669698] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1404.917067] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768210, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.926020] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.811s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1404.926020] env[62510]: DEBUG nova.compute.manager [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1404.928317] env[62510]: DEBUG oslo_concurrency.lockutils [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.329s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1404.931105] env[62510]: INFO nova.compute.claims [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1404.983390] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2c798ac7-77b8-4e3f-86fe-9cd211482b07 tempest-DeleteServersAdminTestJSON-1087720554 tempest-DeleteServersAdminTestJSON-1087720554-project-admin] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1404.983390] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2c798ac7-77b8-4e3f-86fe-9cd211482b07 tempest-DeleteServersAdminTestJSON-1087720554 tempest-DeleteServersAdminTestJSON-1087720554-project-admin] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1404.983390] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c798ac7-77b8-4e3f-86fe-9cd211482b07 tempest-DeleteServersAdminTestJSON-1087720554 tempest-DeleteServersAdminTestJSON-1087720554-project-admin] Deleting the datastore file [datastore1] 0604d37b-38c5-4510-894e-b26fd44e17c5 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1404.983390] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5d26c730-f1ae-4bbc-ac9b-806b0c06594e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.990777] env[62510]: DEBUG oslo_vmware.api [None req-2c798ac7-77b8-4e3f-86fe-9cd211482b07 tempest-DeleteServersAdminTestJSON-1087720554 tempest-DeleteServersAdminTestJSON-1087720554-project-admin] Waiting for the task: (returnval){ [ 1404.990777] env[62510]: value = "task-1768212" [ 1404.990777] env[62510]: _type = "Task" [ 1404.990777] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.007331] env[62510]: DEBUG oslo_vmware.api [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Task: {'id': task-1768206, 'name': PowerOnVM_Task, 'duration_secs': 0.682645} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.013552] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1405.013885] env[62510]: INFO nova.compute.manager [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Took 11.75 seconds to spawn the instance on the hypervisor. [ 1405.014206] env[62510]: DEBUG nova.compute.manager [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1405.014592] env[62510]: DEBUG oslo_vmware.api [None req-2c798ac7-77b8-4e3f-86fe-9cd211482b07 tempest-DeleteServersAdminTestJSON-1087720554 tempest-DeleteServersAdminTestJSON-1087720554-project-admin] Task: {'id': task-1768212, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.016784] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d997d3a-24d8-47bb-aff0-82e209b5e030 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.021961] env[62510]: DEBUG nova.compute.manager [req-a4b29453-0bca-4cbc-a9f1-92c3d2c1d96c req-2c04d68c-309f-49c0-b1d5-1c75d589367c service nova] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Received event network-vif-plugged-c9624a79-da6f-44aa-87fe-e5872f2e1d7d {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1405.022675] env[62510]: DEBUG oslo_concurrency.lockutils [req-a4b29453-0bca-4cbc-a9f1-92c3d2c1d96c req-2c04d68c-309f-49c0-b1d5-1c75d589367c service nova] Acquiring lock "585784c5-b56a-435d-8b22-53bc5cb39b25-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1405.022675] env[62510]: DEBUG oslo_concurrency.lockutils [req-a4b29453-0bca-4cbc-a9f1-92c3d2c1d96c req-2c04d68c-309f-49c0-b1d5-1c75d589367c service nova] Lock "585784c5-b56a-435d-8b22-53bc5cb39b25-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1405.022856] env[62510]: DEBUG oslo_concurrency.lockutils [req-a4b29453-0bca-4cbc-a9f1-92c3d2c1d96c req-2c04d68c-309f-49c0-b1d5-1c75d589367c service nova] Lock "585784c5-b56a-435d-8b22-53bc5cb39b25-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1405.022997] env[62510]: DEBUG nova.compute.manager [req-a4b29453-0bca-4cbc-a9f1-92c3d2c1d96c req-2c04d68c-309f-49c0-b1d5-1c75d589367c service nova] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] No waiting 
events found dispatching network-vif-plugged-c9624a79-da6f-44aa-87fe-e5872f2e1d7d {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1405.023192] env[62510]: WARNING nova.compute.manager [req-a4b29453-0bca-4cbc-a9f1-92c3d2c1d96c req-2c04d68c-309f-49c0-b1d5-1c75d589367c service nova] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Received unexpected event network-vif-plugged-c9624a79-da6f-44aa-87fe-e5872f2e1d7d for instance with vm_state building and task_state spawning. [ 1405.071598] env[62510]: DEBUG oslo_vmware.api [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Task: {'id': task-1768211, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.079439] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Acquiring lock "refresh_cache-c7d875ee-2b9c-48e4-9bf9-f7602e75ec62" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1405.080062] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Acquired lock "refresh_cache-c7d875ee-2b9c-48e4-9bf9-f7602e75ec62" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1405.080690] env[62510]: DEBUG nova.network.neutron [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1405.411589] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768210, 'name': CreateVM_Task, 'duration_secs': 0.826315} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.413136] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1405.414379] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1405.414725] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1405.415216] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1405.415638] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7c414d1-2ab6-4cf8-9209-02d20e910c4a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.425024] env[62510]: DEBUG oslo_vmware.api [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Waiting for the task: (returnval){ [ 1405.425024] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52f8f67b-a0a7-8448-2f13-804cb99f9943" [ 1405.425024] env[62510]: _type = "Task" [ 1405.425024] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.432605] env[62510]: DEBUG oslo_vmware.api [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52f8f67b-a0a7-8448-2f13-804cb99f9943, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.439257] env[62510]: DEBUG nova.compute.utils [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1405.441688] env[62510]: DEBUG nova.compute.manager [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1405.442140] env[62510]: DEBUG nova.network.neutron [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1405.504929] env[62510]: DEBUG oslo_vmware.api [None req-2c798ac7-77b8-4e3f-86fe-9cd211482b07 tempest-DeleteServersAdminTestJSON-1087720554 tempest-DeleteServersAdminTestJSON-1087720554-project-admin] Task: {'id': task-1768212, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.546082] env[62510]: INFO nova.compute.manager [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Took 19.24 seconds to build instance. [ 1405.571306] env[62510]: DEBUG oslo_vmware.api [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Task: {'id': task-1768211, 'name': PowerOnVM_Task, 'duration_secs': 0.665799} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.572495] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1405.572703] env[62510]: INFO nova.compute.manager [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Took 7.47 seconds to spawn the instance on the hypervisor. 
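The PowerOnVM_Task entries above show the request/poll cycle oslo.vmware drives for every vCenter task: the SOAP method is invoked, wait_for_task blocks on the returned task object, progress is polled periodically (0%, 89%, ...), and completion is reported with duration_secs. A minimal sketch of that cycle through the public oslo.vmware API follows; the helper name power_on and the session/vm_ref arguments are illustrative placeholders, not code taken from this deployment.

from oslo_vmware import api as vmware_api


def power_on(session: "vmware_api.VMwareAPISession", vm_ref):
    """Start a VM and block until its PowerOnVM_Task finishes.

    `session` is an already-authenticated VMwareAPISession and `vm_ref` a
    VirtualMachine managed-object reference; both are assumed to exist.
    """
    # Corresponds to the "Invoking VirtualMachine.PowerOnVM_Task" entries.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # Corresponds to the "Waiting for the task" / "progress is N%" entries;
    # returns the task info on success and raises if the task fails.
    return session.wait_for_task(task)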
[ 1405.572897] env[62510]: DEBUG nova.compute.manager [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1405.573989] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cf173a9-b3e3-4259-bcc7-41eca213b762 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.625203] env[62510]: DEBUG nova.policy [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '93a5cec1868248c0b5cf2c88d9099148', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '44fbb61ad5364e0cb30d884cf96fe671', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1405.657341] env[62510]: DEBUG nova.network.neutron [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Successfully updated port: 55cae8a5-e495-4d62-a2c0-b2effaf346ec {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1405.683433] env[62510]: DEBUG nova.network.neutron [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1405.940892] env[62510]: DEBUG oslo_vmware.api [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52f8f67b-a0a7-8448-2f13-804cb99f9943, 'name': SearchDatastore_Task, 'duration_secs': 0.05358} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.942196] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1405.942491] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1405.942741] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1405.942889] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1405.943077] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1405.943690] env[62510]: DEBUG nova.compute.manager [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1405.946382] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-75d591a4-20ba-47ea-9a48-04ee97a1152b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.960360] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1405.960578] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1405.961387] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00e70664-2c4f-48c8-a6f9-10f5a3ae609e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.973407] env[62510]: DEBUG oslo_vmware.api [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Waiting for the task: (returnval){ [ 1405.973407] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52608cc4-19bf-73d1-5b9d-3dee23796930" [ 1405.973407] env[62510]: _type = "Task" [ 1405.973407] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.987646] env[62510]: DEBUG oslo_vmware.api [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52608cc4-19bf-73d1-5b9d-3dee23796930, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.006609] env[62510]: DEBUG oslo_vmware.api [None req-2c798ac7-77b8-4e3f-86fe-9cd211482b07 tempest-DeleteServersAdminTestJSON-1087720554 tempest-DeleteServersAdminTestJSON-1087720554-project-admin] Task: {'id': task-1768212, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.559558} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.008070] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c798ac7-77b8-4e3f-86fe-9cd211482b07 tempest-DeleteServersAdminTestJSON-1087720554 tempest-DeleteServersAdminTestJSON-1087720554-project-admin] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1406.008347] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2c798ac7-77b8-4e3f-86fe-9cd211482b07 tempest-DeleteServersAdminTestJSON-1087720554 tempest-DeleteServersAdminTestJSON-1087720554-project-admin] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1406.009072] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2c798ac7-77b8-4e3f-86fe-9cd211482b07 tempest-DeleteServersAdminTestJSON-1087720554 tempest-DeleteServersAdminTestJSON-1087720554-project-admin] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1406.009072] env[62510]: INFO nova.compute.manager [None req-2c798ac7-77b8-4e3f-86fe-9cd211482b07 tempest-DeleteServersAdminTestJSON-1087720554 tempest-DeleteServersAdminTestJSON-1087720554-project-admin] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Took 2.59 seconds to destroy the instance on the hypervisor. [ 1406.009072] env[62510]: DEBUG oslo.service.loopingcall [None req-2c798ac7-77b8-4e3f-86fe-9cd211482b07 tempest-DeleteServersAdminTestJSON-1087720554 tempest-DeleteServersAdminTestJSON-1087720554-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1406.009559] env[62510]: DEBUG nova.compute.manager [-] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1406.009673] env[62510]: DEBUG nova.network.neutron [-] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1406.048184] env[62510]: DEBUG oslo_concurrency.lockutils [None req-58c89145-ebba-4b9d-bbf9-509eb312c734 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Lock "3266d254-4a75-4fd3-b4e7-ebeb86467cbe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.754s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1406.095256] env[62510]: INFO nova.compute.manager [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Took 19.57 seconds to build instance. [ 1406.160222] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Acquiring lock "refresh_cache-eb840df4-edc1-44cb-84c9-f31b7b56b6bd" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1406.160525] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Acquired lock "refresh_cache-eb840df4-edc1-44cb-84c9-f31b7b56b6bd" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1406.160602] env[62510]: DEBUG nova.network.neutron [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1406.292146] env[62510]: DEBUG nova.network.neutron [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Updating instance_info_cache with network_info: [{"id": "7a0f3d5f-4630-470a-9084-d1e05b4f306e", "address": "fa:16:3e:85:5f:26", "network": {"id": "eb00b6bc-924a-4070-bdac-d34d8e726329", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1974390299-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "c852b1f21b054fd0b6961685dcf528f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19671de9-8b5b-4710-adc3-7419f3c0f171", "external-id": 
"nsx-vlan-transportzone-421", "segmentation_id": 421, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a0f3d5f-46", "ovs_interfaceid": "7a0f3d5f-4630-470a-9084-d1e05b4f306e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1406.305542] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2baf6927-a794-4f14-a82c-28cdffde6338 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.315206] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42a7cc20-ef14-42da-83c0-1fdaccb474bf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.351167] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-461f8e7d-1dd5-45bc-82f4-2b09c515a915 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.360654] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a035e811-9021-4c9a-9c0f-fd84347b8476 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.378544] env[62510]: DEBUG nova.compute.provider_tree [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1406.487800] env[62510]: DEBUG oslo_vmware.api [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52608cc4-19bf-73d1-5b9d-3dee23796930, 'name': SearchDatastore_Task, 'duration_secs': 0.024982} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.487800] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b99d3a1c-d788-4b1c-9c60-f9e1b5aec3de {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.498034] env[62510]: DEBUG oslo_vmware.api [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Waiting for the task: (returnval){ [ 1406.498034] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5238955e-319f-ce30-b1f4-779f51aa150b" [ 1406.498034] env[62510]: _type = "Task" [ 1406.498034] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.508798] env[62510]: DEBUG oslo_vmware.api [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5238955e-319f-ce30-b1f4-779f51aa150b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.554342] env[62510]: DEBUG nova.compute.manager [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1406.561204] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Acquiring lock "cfe53f9c-d78b-4af7-b991-f3549c03f22d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1406.561611] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Lock "cfe53f9c-d78b-4af7-b991-f3549c03f22d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1406.600047] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3cc64e91-6551-491b-a436-bfe8d2424408 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Lock "731e7110-9709-4c4e-96d2-00e21e67c6e3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.088s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1406.748193] env[62510]: DEBUG nova.network.neutron [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Instance cache missing network info. 
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1406.751498] env[62510]: DEBUG nova.network.neutron [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Successfully created port: f3011c4d-9d43-4939-9157-df0532a51861 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1406.797417] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Releasing lock "refresh_cache-c7d875ee-2b9c-48e4-9bf9-f7602e75ec62" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1406.797721] env[62510]: DEBUG nova.compute.manager [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Instance network_info: |[{"id": "7a0f3d5f-4630-470a-9084-d1e05b4f306e", "address": "fa:16:3e:85:5f:26", "network": {"id": "eb00b6bc-924a-4070-bdac-d34d8e726329", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1974390299-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "c852b1f21b054fd0b6961685dcf528f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19671de9-8b5b-4710-adc3-7419f3c0f171", "external-id": "nsx-vlan-transportzone-421", "segmentation_id": 421, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a0f3d5f-46", "ovs_interfaceid": "7a0f3d5f-4630-470a-9084-d1e05b4f306e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1406.799254] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:5f:26', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '19671de9-8b5b-4710-adc3-7419f3c0f171', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7a0f3d5f-4630-470a-9084-d1e05b4f306e', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1406.809173] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Creating folder: Project (c852b1f21b054fd0b6961685dcf528f5). Parent ref: group-v367197. 
{{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1406.810082] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cf074889-a655-46e7-9ab2-7193cefa557b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.824878] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Created folder: Project (c852b1f21b054fd0b6961685dcf528f5) in parent group-v367197. [ 1406.825200] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Creating folder: Instances. Parent ref: group-v367216. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1406.825366] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-24c309f6-bdeb-464a-9dae-3c5bca162a2b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.838726] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Created folder: Instances in parent group-v367216. [ 1406.839366] env[62510]: DEBUG oslo.service.loopingcall [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1406.839659] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1406.839889] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7b0aa04f-f633-45b5-a8f0-b3903e60ce77 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.864843] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1406.864843] env[62510]: value = "task-1768215" [ 1406.864843] env[62510]: _type = "Task" [ 1406.864843] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.877435] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768215, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.884560] env[62510]: DEBUG nova.scheduler.client.report [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1406.956956] env[62510]: DEBUG nova.compute.manager [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1406.991683] env[62510]: DEBUG nova.virt.hardware [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1406.991956] env[62510]: DEBUG nova.virt.hardware [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1406.992139] env[62510]: DEBUG nova.virt.hardware [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1406.992335] env[62510]: DEBUG nova.virt.hardware [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1406.992492] env[62510]: DEBUG nova.virt.hardware [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1406.992641] env[62510]: DEBUG 
nova.virt.hardware [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1406.992880] env[62510]: DEBUG nova.virt.hardware [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1406.994049] env[62510]: DEBUG nova.virt.hardware [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1406.994345] env[62510]: DEBUG nova.virt.hardware [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1406.995438] env[62510]: DEBUG nova.virt.hardware [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1406.995438] env[62510]: DEBUG nova.virt.hardware [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1406.996246] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bb8606e-49ae-4b13-b616-2357116dde59 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.010147] env[62510]: DEBUG oslo_vmware.api [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5238955e-319f-ce30-b1f4-779f51aa150b, 'name': SearchDatastore_Task, 'duration_secs': 0.03975} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.012944] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1407.012944] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 585784c5-b56a-435d-8b22-53bc5cb39b25/585784c5-b56a-435d-8b22-53bc5cb39b25.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1407.013115] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d0d8814b-904b-4045-8fba-3d47fe65fc61 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.016469] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a52e1fec-50a7-4e89-af75-f1f9b3c1e89a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.037730] env[62510]: DEBUG oslo_vmware.api [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Waiting for the task: (returnval){ [ 1407.037730] env[62510]: value = "task-1768216" [ 1407.037730] env[62510]: _type = "Task" [ 1407.037730] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.051090] env[62510]: DEBUG oslo_vmware.api [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Task: {'id': task-1768216, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.086689] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1407.093144] env[62510]: DEBUG nova.compute.manager [req-d2206095-baea-481e-b5aa-033a0764c9e5 req-9ef007e6-71df-4acc-966c-2b756ce18a55 service nova] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Received event network-vif-plugged-e83f36f6-e38c-49b4-b419-59f9030e6005 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1407.093490] env[62510]: DEBUG oslo_concurrency.lockutils [req-d2206095-baea-481e-b5aa-033a0764c9e5 req-9ef007e6-71df-4acc-966c-2b756ce18a55 service nova] Acquiring lock "eb840df4-edc1-44cb-84c9-f31b7b56b6bd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1407.094147] env[62510]: DEBUG oslo_concurrency.lockutils [req-d2206095-baea-481e-b5aa-033a0764c9e5 req-9ef007e6-71df-4acc-966c-2b756ce18a55 service nova] Lock "eb840df4-edc1-44cb-84c9-f31b7b56b6bd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1407.094147] env[62510]: DEBUG oslo_concurrency.lockutils [req-d2206095-baea-481e-b5aa-033a0764c9e5 req-9ef007e6-71df-4acc-966c-2b756ce18a55 service nova] Lock "eb840df4-edc1-44cb-84c9-f31b7b56b6bd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1407.094568] env[62510]: DEBUG nova.compute.manager [req-d2206095-baea-481e-b5aa-033a0764c9e5 req-9ef007e6-71df-4acc-966c-2b756ce18a55 service nova] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] No waiting events found dispatching network-vif-plugged-e83f36f6-e38c-49b4-b419-59f9030e6005 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1407.095225] env[62510]: WARNING nova.compute.manager [req-d2206095-baea-481e-b5aa-033a0764c9e5 req-9ef007e6-71df-4acc-966c-2b756ce18a55 service nova] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Received unexpected event network-vif-plugged-e83f36f6-e38c-49b4-b419-59f9030e6005 for instance with vm_state building and task_state spawning. [ 1407.097370] env[62510]: DEBUG nova.compute.manager [req-d2206095-baea-481e-b5aa-033a0764c9e5 req-9ef007e6-71df-4acc-966c-2b756ce18a55 service nova] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Received event network-changed-e83f36f6-e38c-49b4-b419-59f9030e6005 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1407.097370] env[62510]: DEBUG nova.compute.manager [req-d2206095-baea-481e-b5aa-033a0764c9e5 req-9ef007e6-71df-4acc-966c-2b756ce18a55 service nova] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Refreshing instance network info cache due to event network-changed-e83f36f6-e38c-49b4-b419-59f9030e6005. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1407.097370] env[62510]: DEBUG oslo_concurrency.lockutils [req-d2206095-baea-481e-b5aa-033a0764c9e5 req-9ef007e6-71df-4acc-966c-2b756ce18a55 service nova] Acquiring lock "refresh_cache-eb840df4-edc1-44cb-84c9-f31b7b56b6bd" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1407.103247] env[62510]: DEBUG nova.compute.manager [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1407.378630] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768215, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.391207] env[62510]: DEBUG oslo_concurrency.lockutils [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.462s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1407.391207] env[62510]: DEBUG nova.compute.manager [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1407.394223] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 10.156s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1407.394459] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1407.398023] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62510) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1407.398023] env[62510]: DEBUG oslo_concurrency.lockutils [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.527s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1407.398023] env[62510]: INFO nova.compute.claims [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 
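The "Acquiring lock / acquired by ... waited / released ... held" DEBUG triplets that bracket the resource-tracker operations above (instance_claim, clean_compute_node_cache, _update_available_resource) are emitted by oslo.concurrency's lockutils wrapper around the shared "compute_resources" lock, which serializes resource accounting on the compute node. A minimal sketch of that decorator pattern follows; claim_resources_sketch is a hypothetical function name, and nova reaches the same wrapper through its own helpers rather than calling it exactly like this.

from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def claim_resources_sketch(instance_uuid):
    # The wrapper logs the acquire/wait timing before this body runs and the
    # held time after it returns, producing lines like those in this log.
    return f"claimed resources for {instance_uuid}"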
[ 1407.404906] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1edbdda-c59e-49b3-9239-646140af86ed {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.420237] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2da1f983-e637-418d-a4e6-607c8d788bcb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.452664] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6e1eb2c-86a6-409c-b5fb-4d8be34b54cf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.462622] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb0ab006-2e63-459e-8639-8d57e28c0a1a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.500618] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181381MB free_disk=166GB free_vcpus=48 pci_devices=None {{(pid=62510) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1407.500883] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1407.552533] env[62510]: DEBUG oslo_vmware.api [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Task: {'id': task-1768216, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.568618] env[62510]: DEBUG nova.network.neutron [-] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1407.648177] env[62510]: DEBUG oslo_concurrency.lockutils [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1407.878333] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768215, 'name': CreateVM_Task, 'duration_secs': 0.710113} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.878552] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1407.879365] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1407.879540] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1407.879899] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1407.880191] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ae616f0-ea6f-4df6-b575-2d48731d9c7c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.888383] env[62510]: DEBUG oslo_vmware.api [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Waiting for the task: (returnval){ [ 1407.888383] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52aee594-c68d-388e-e673-6796d02636f5" [ 1407.888383] env[62510]: _type = "Task" [ 1407.888383] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.897934] env[62510]: DEBUG oslo_vmware.api [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52aee594-c68d-388e-e673-6796d02636f5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.905534] env[62510]: DEBUG nova.compute.utils [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1407.907077] env[62510]: DEBUG nova.compute.manager [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1407.907303] env[62510]: DEBUG nova.network.neutron [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1408.016501] env[62510]: DEBUG nova.policy [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dcc6b50d1d8f49a9b9017b9532696221', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c852b1f21b054fd0b6961685dcf528f5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1408.054031] env[62510]: DEBUG oslo_vmware.api [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Task: {'id': task-1768216, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.612895} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.054031] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 585784c5-b56a-435d-8b22-53bc5cb39b25/585784c5-b56a-435d-8b22-53bc5cb39b25.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1408.054031] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1408.054031] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-45991abc-626d-4be0-a7d4-15b88f72c57b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.064361] env[62510]: DEBUG oslo_vmware.api [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Waiting for the task: (returnval){ [ 1408.064361] env[62510]: value = "task-1768217" [ 1408.064361] env[62510]: _type = "Task" [ 1408.064361] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.076827] env[62510]: INFO nova.compute.manager [-] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Took 2.07 seconds to deallocate network for instance. 
[ 1408.077500] env[62510]: DEBUG oslo_vmware.api [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Task: {'id': task-1768217, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.115084] env[62510]: DEBUG nova.network.neutron [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Updating instance_info_cache with network_info: [{"id": "ca5eb991-9338-4e3a-8dcc-322896c420df", "address": "fa:16:3e:83:59:76", "network": {"id": "1c6ee7c8-8ce3-4518-85af-ad2c2576ec57", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-436557069", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.19", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d33701c4eedd47268e1c8d16bd63de81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca5eb991-93", "ovs_interfaceid": "ca5eb991-9338-4e3a-8dcc-322896c420df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e83f36f6-e38c-49b4-b419-59f9030e6005", "address": "fa:16:3e:02:7b:22", "network": {"id": "a32391f7-e8f4-4878-855a-13562e648ddc", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-129592885", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.236", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "d33701c4eedd47268e1c8d16bd63de81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bff6c3a1-cc80-46ca-86c0-6dbb029edddb", "external-id": "nsx-vlan-transportzone-223", "segmentation_id": 223, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape83f36f6-e3", "ovs_interfaceid": "e83f36f6-e38c-49b4-b419-59f9030e6005", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "55cae8a5-e495-4d62-a2c0-b2effaf346ec", "address": "fa:16:3e:88:74:a7", "network": {"id": "1c6ee7c8-8ce3-4518-85af-ad2c2576ec57", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-436557069", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.86", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": 
{"injected": false, "tenant_id": "d33701c4eedd47268e1c8d16bd63de81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55cae8a5-e4", "ovs_interfaceid": "55cae8a5-e495-4d62-a2c0-b2effaf346ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1408.236431] env[62510]: DEBUG nova.compute.manager [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Stashing vm_state: active {{(pid=62510) _prep_resize /opt/stack/nova/nova/compute/manager.py:5998}} [ 1408.399059] env[62510]: DEBUG oslo_vmware.api [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52aee594-c68d-388e-e673-6796d02636f5, 'name': SearchDatastore_Task, 'duration_secs': 0.041755} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.399559] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1408.399799] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1408.400041] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1408.400192] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1408.400375] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1408.401075] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-75ebba22-dac3-47eb-9968-9f4d192ba0b7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.410074] env[62510]: DEBUG nova.compute.manager [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1408.413653] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1408.413854] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1408.417440] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd98269e-9ed7-43f3-8cf8-217006bd35d7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.423856] env[62510]: DEBUG oslo_vmware.api [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Waiting for the task: (returnval){ [ 1408.423856] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]527c5eb0-75ac-5b92-73c3-7130129dbea1" [ 1408.423856] env[62510]: _type = "Task" [ 1408.423856] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.433124] env[62510]: DEBUG oslo_vmware.api [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]527c5eb0-75ac-5b92-73c3-7130129dbea1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.577759] env[62510]: DEBUG oslo_vmware.api [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Task: {'id': task-1768217, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.176718} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.578071] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1408.579483] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b0d6a83-9911-4c30-a2d5-d0f4688b270b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.585301] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2c798ac7-77b8-4e3f-86fe-9cd211482b07 tempest-DeleteServersAdminTestJSON-1087720554 tempest-DeleteServersAdminTestJSON-1087720554-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1408.608814] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Reconfiguring VM instance instance-00000007 to attach disk [datastore1] 585784c5-b56a-435d-8b22-53bc5cb39b25/585784c5-b56a-435d-8b22-53bc5cb39b25.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1408.611817] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-485507fb-d889-4b25-bc01-c107a963ca85 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.627297] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Releasing lock "refresh_cache-eb840df4-edc1-44cb-84c9-f31b7b56b6bd" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1408.627684] env[62510]: DEBUG nova.compute.manager [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Instance network_info: |[{"id": "ca5eb991-9338-4e3a-8dcc-322896c420df", "address": "fa:16:3e:83:59:76", "network": {"id": "1c6ee7c8-8ce3-4518-85af-ad2c2576ec57", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-436557069", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.19", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d33701c4eedd47268e1c8d16bd63de81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca5eb991-93", 
"ovs_interfaceid": "ca5eb991-9338-4e3a-8dcc-322896c420df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e83f36f6-e38c-49b4-b419-59f9030e6005", "address": "fa:16:3e:02:7b:22", "network": {"id": "a32391f7-e8f4-4878-855a-13562e648ddc", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-129592885", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.236", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "d33701c4eedd47268e1c8d16bd63de81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bff6c3a1-cc80-46ca-86c0-6dbb029edddb", "external-id": "nsx-vlan-transportzone-223", "segmentation_id": 223, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape83f36f6-e3", "ovs_interfaceid": "e83f36f6-e38c-49b4-b419-59f9030e6005", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "55cae8a5-e495-4d62-a2c0-b2effaf346ec", "address": "fa:16:3e:88:74:a7", "network": {"id": "1c6ee7c8-8ce3-4518-85af-ad2c2576ec57", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-436557069", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.86", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d33701c4eedd47268e1c8d16bd63de81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55cae8a5-e4", "ovs_interfaceid": "55cae8a5-e495-4d62-a2c0-b2effaf346ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1408.628014] env[62510]: DEBUG oslo_concurrency.lockutils [req-d2206095-baea-481e-b5aa-033a0764c9e5 req-9ef007e6-71df-4acc-966c-2b756ce18a55 service nova] Acquired lock "refresh_cache-eb840df4-edc1-44cb-84c9-f31b7b56b6bd" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1408.628218] env[62510]: DEBUG nova.network.neutron [req-d2206095-baea-481e-b5aa-033a0764c9e5 req-9ef007e6-71df-4acc-966c-2b756ce18a55 service nova] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Refreshing network info cache for port e83f36f6-e38c-49b4-b419-59f9030e6005 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1408.629469] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: 
eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:83:59:76', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4728adca-2846-416a-91a3-deb898faf1f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ca5eb991-9338-4e3a-8dcc-322896c420df', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:02:7b:22', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bff6c3a1-cc80-46ca-86c0-6dbb029edddb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e83f36f6-e38c-49b4-b419-59f9030e6005', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:88:74:a7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4728adca-2846-416a-91a3-deb898faf1f3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '55cae8a5-e495-4d62-a2c0-b2effaf346ec', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1408.640442] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Creating folder: Project (d33701c4eedd47268e1c8d16bd63de81). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1408.648405] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-104c0f33-35ef-43d3-ba86-febc6c770dd3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.652482] env[62510]: DEBUG oslo_vmware.api [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Waiting for the task: (returnval){ [ 1408.652482] env[62510]: value = "task-1768218" [ 1408.652482] env[62510]: _type = "Task" [ 1408.652482] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.662118] env[62510]: DEBUG oslo_vmware.api [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Task: {'id': task-1768218, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.663904] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Created folder: Project (d33701c4eedd47268e1c8d16bd63de81) in parent group-v367197. [ 1408.664108] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Creating folder: Instances. Parent ref: group-v367219. 
{{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1408.664343] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4fd483aa-413e-401a-8a96-f73049bceca0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.677368] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Created folder: Instances in parent group-v367219. [ 1408.677616] env[62510]: DEBUG oslo.service.loopingcall [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1408.677861] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1408.678131] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-abdec9b4-0e48-49b8-8356-5e4ba08c3117 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.707582] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1408.707582] env[62510]: value = "task-1768221" [ 1408.707582] env[62510]: _type = "Task" [ 1408.707582] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.721360] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768221, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.764693] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1408.829839] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-255516b1-4cf3-4888-a361-f89128f9237c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.841944] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2bcae6f-55e8-41e5-9173-db5b93e78687 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.882702] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d29b398c-c075-4459-bb5e-6226378f7179 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.893584] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be35457e-1bf1-4d5d-8d8c-130d68e75004 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.910827] env[62510]: DEBUG nova.compute.provider_tree [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1408.941116] env[62510]: DEBUG oslo_vmware.api [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]527c5eb0-75ac-5b92-73c3-7130129dbea1, 'name': SearchDatastore_Task, 'duration_secs': 0.017048} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.942242] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-256067b2-e0c8-469b-b86c-096db9fc3255 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.950896] env[62510]: DEBUG oslo_vmware.api [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Waiting for the task: (returnval){ [ 1408.950896] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52146b8f-fe5a-e6eb-6aa2-aefaabdbd2d1" [ 1408.950896] env[62510]: _type = "Task" [ 1408.950896] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.968634] env[62510]: DEBUG oslo_vmware.api [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52146b8f-fe5a-e6eb-6aa2-aefaabdbd2d1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.082141] env[62510]: DEBUG nova.network.neutron [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Successfully created port: 6faa955b-378f-4f0d-9181-22e9295cf131 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1409.113854] env[62510]: DEBUG nova.compute.manager [req-750a4ddd-1bc7-46e1-b278-9d5d2b4d52d9 req-1e4a550d-3db4-472c-a447-9fa94b2bd60b service nova] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Received event network-changed-c9624a79-da6f-44aa-87fe-e5872f2e1d7d {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1409.114045] env[62510]: DEBUG nova.compute.manager [req-750a4ddd-1bc7-46e1-b278-9d5d2b4d52d9 req-1e4a550d-3db4-472c-a447-9fa94b2bd60b service nova] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Refreshing instance network info cache due to event network-changed-c9624a79-da6f-44aa-87fe-e5872f2e1d7d. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1409.114269] env[62510]: DEBUG oslo_concurrency.lockutils [req-750a4ddd-1bc7-46e1-b278-9d5d2b4d52d9 req-1e4a550d-3db4-472c-a447-9fa94b2bd60b service nova] Acquiring lock "refresh_cache-585784c5-b56a-435d-8b22-53bc5cb39b25" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1409.114405] env[62510]: DEBUG oslo_concurrency.lockutils [req-750a4ddd-1bc7-46e1-b278-9d5d2b4d52d9 req-1e4a550d-3db4-472c-a447-9fa94b2bd60b service nova] Acquired lock "refresh_cache-585784c5-b56a-435d-8b22-53bc5cb39b25" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1409.114563] env[62510]: DEBUG nova.network.neutron [req-750a4ddd-1bc7-46e1-b278-9d5d2b4d52d9 req-1e4a550d-3db4-472c-a447-9fa94b2bd60b service nova] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Refreshing network info cache for port c9624a79-da6f-44aa-87fe-e5872f2e1d7d {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1409.164746] env[62510]: DEBUG oslo_vmware.api [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Task: {'id': task-1768218, 'name': ReconfigVM_Task, 'duration_secs': 0.378505} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.164937] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Reconfigured VM instance instance-00000007 to attach disk [datastore1] 585784c5-b56a-435d-8b22-53bc5cb39b25/585784c5-b56a-435d-8b22-53bc5cb39b25.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1409.165612] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-be95e4b5-ed7a-4994-88eb-c9683877ff75 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.175871] env[62510]: DEBUG oslo_vmware.api [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Waiting for the task: (returnval){ [ 1409.175871] env[62510]: value = "task-1768222" [ 1409.175871] env[62510]: _type = "Task" [ 1409.175871] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.187712] env[62510]: DEBUG oslo_vmware.api [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Task: {'id': task-1768222, 'name': Rename_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.225431] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768221, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.368293] env[62510]: DEBUG nova.network.neutron [req-d2206095-baea-481e-b5aa-033a0764c9e5 req-9ef007e6-71df-4acc-966c-2b756ce18a55 service nova] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Updated VIF entry in instance network info cache for port e83f36f6-e38c-49b4-b419-59f9030e6005. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1409.368293] env[62510]: DEBUG nova.network.neutron [req-d2206095-baea-481e-b5aa-033a0764c9e5 req-9ef007e6-71df-4acc-966c-2b756ce18a55 service nova] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Updating instance_info_cache with network_info: [{"id": "ca5eb991-9338-4e3a-8dcc-322896c420df", "address": "fa:16:3e:83:59:76", "network": {"id": "1c6ee7c8-8ce3-4518-85af-ad2c2576ec57", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-436557069", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.19", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d33701c4eedd47268e1c8d16bd63de81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca5eb991-93", "ovs_interfaceid": "ca5eb991-9338-4e3a-8dcc-322896c420df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e83f36f6-e38c-49b4-b419-59f9030e6005", "address": "fa:16:3e:02:7b:22", "network": {"id": "a32391f7-e8f4-4878-855a-13562e648ddc", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-129592885", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.236", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "d33701c4eedd47268e1c8d16bd63de81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bff6c3a1-cc80-46ca-86c0-6dbb029edddb", "external-id": "nsx-vlan-transportzone-223", "segmentation_id": 223, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape83f36f6-e3", "ovs_interfaceid": "e83f36f6-e38c-49b4-b419-59f9030e6005", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "55cae8a5-e495-4d62-a2c0-b2effaf346ec", "address": "fa:16:3e:88:74:a7", "network": {"id": "1c6ee7c8-8ce3-4518-85af-ad2c2576ec57", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-436557069", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.86", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d33701c4eedd47268e1c8d16bd63de81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", 
"segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55cae8a5-e4", "ovs_interfaceid": "55cae8a5-e495-4d62-a2c0-b2effaf346ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1409.415194] env[62510]: DEBUG nova.scheduler.client.report [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1409.421375] env[62510]: DEBUG nova.compute.manager [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1409.452566] env[62510]: DEBUG nova.virt.hardware [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1409.453013] env[62510]: DEBUG nova.virt.hardware [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1409.453292] env[62510]: DEBUG nova.virt.hardware [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1409.453388] env[62510]: DEBUG nova.virt.hardware [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 
1409.453858] env[62510]: DEBUG nova.virt.hardware [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1409.454143] env[62510]: DEBUG nova.virt.hardware [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1409.454483] env[62510]: DEBUG nova.virt.hardware [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1409.454628] env[62510]: DEBUG nova.virt.hardware [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1409.454834] env[62510]: DEBUG nova.virt.hardware [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1409.455034] env[62510]: DEBUG nova.virt.hardware [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1409.455260] env[62510]: DEBUG nova.virt.hardware [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1409.456611] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-339286dd-8f05-4b55-8057-5ad7412b2ece {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.472020] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3281cb1-f060-40eb-9456-055f101cf8d3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.479862] env[62510]: DEBUG oslo_vmware.api [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52146b8f-fe5a-e6eb-6aa2-aefaabdbd2d1, 'name': SearchDatastore_Task, 'duration_secs': 0.017759} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.479862] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1409.482593] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] c7d875ee-2b9c-48e4-9bf9-f7602e75ec62/c7d875ee-2b9c-48e4-9bf9-f7602e75ec62.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1409.482593] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0177ad08-4440-4faf-9df1-ba868630c488 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.496425] env[62510]: DEBUG oslo_vmware.api [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Waiting for the task: (returnval){ [ 1409.496425] env[62510]: value = "task-1768223" [ 1409.496425] env[62510]: _type = "Task" [ 1409.496425] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.512578] env[62510]: DEBUG oslo_vmware.api [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768223, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.656889] env[62510]: DEBUG nova.network.neutron [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Successfully updated port: f3011c4d-9d43-4939-9157-df0532a51861 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1409.696012] env[62510]: DEBUG oslo_vmware.api [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Task: {'id': task-1768222, 'name': Rename_Task, 'duration_secs': 0.162176} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.696544] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1409.697224] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bea13f57-4006-46c1-97e5-6dd638cc6732 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.708176] env[62510]: DEBUG oslo_vmware.api [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Waiting for the task: (returnval){ [ 1409.708176] env[62510]: value = "task-1768224" [ 1409.708176] env[62510]: _type = "Task" [ 1409.708176] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.728227] env[62510]: DEBUG oslo_vmware.api [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Task: {'id': task-1768224, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.731611] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768221, 'name': CreateVM_Task, 'duration_secs': 0.708348} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.731791] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1409.732690] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1409.732854] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1409.733669] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1409.734523] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f1233d8e-31f3-45f0-82dc-2419bdbab165 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.742089] env[62510]: DEBUG oslo_vmware.api 
[None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Waiting for the task: (returnval){ [ 1409.742089] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]522dcefc-433e-d4fe-d687-464648183ae1" [ 1409.742089] env[62510]: _type = "Task" [ 1409.742089] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.757918] env[62510]: DEBUG oslo_vmware.api [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]522dcefc-433e-d4fe-d687-464648183ae1, 'name': SearchDatastore_Task, 'duration_secs': 0.011347} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.757918] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1409.757918] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1409.758360] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1409.758360] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1409.758499] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1409.758908] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e67c5be5-45aa-4d67-9e5e-65b88a03e39f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.769527] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1409.769527] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1409.770221] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b3f58d4-f6e7-4310-b228-d40ff427e3b2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.782687] env[62510]: DEBUG oslo_vmware.api [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Waiting for the task: (returnval){ [ 1409.782687] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]523d8788-7607-7864-ed4a-6edcda002a31" [ 1409.782687] env[62510]: _type = "Task" [ 1409.782687] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.797826] env[62510]: DEBUG oslo_vmware.api [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]523d8788-7607-7864-ed4a-6edcda002a31, 'name': SearchDatastore_Task, 'duration_secs': 0.012754} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.800106] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df4688e7-5a71-46d0-a9c3-2a4811f38080 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.808867] env[62510]: DEBUG oslo_vmware.api [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Waiting for the task: (returnval){ [ 1409.808867] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]524c1ca7-489b-37b3-e05e-479baf50feb0" [ 1409.808867] env[62510]: _type = "Task" [ 1409.808867] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.822268] env[62510]: DEBUG oslo_vmware.api [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]524c1ca7-489b-37b3-e05e-479baf50feb0, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.822546] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1409.822913] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] eb840df4-edc1-44cb-84c9-f31b7b56b6bd/eb840df4-edc1-44cb-84c9-f31b7b56b6bd.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1409.823063] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a3b2d78a-2d3b-4f93-9dbb-81ef533ef527 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.832981] env[62510]: DEBUG oslo_vmware.api [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Waiting for the task: (returnval){ [ 1409.832981] env[62510]: value = "task-1768225" [ 1409.832981] env[62510]: _type = "Task" [ 1409.832981] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.850524] env[62510]: DEBUG oslo_vmware.api [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': task-1768225, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.873378] env[62510]: DEBUG oslo_concurrency.lockutils [req-d2206095-baea-481e-b5aa-033a0764c9e5 req-9ef007e6-71df-4acc-966c-2b756ce18a55 service nova] Releasing lock "refresh_cache-eb840df4-edc1-44cb-84c9-f31b7b56b6bd" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1409.928455] env[62510]: DEBUG oslo_concurrency.lockutils [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.533s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1409.928970] env[62510]: DEBUG nova.compute.manager [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1409.932155] env[62510]: DEBUG oslo_concurrency.lockutils [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.463s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1409.933800] env[62510]: INFO nova.compute.claims [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1410.012629] env[62510]: DEBUG oslo_vmware.api [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768223, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.159228] env[62510]: DEBUG nova.network.neutron [req-750a4ddd-1bc7-46e1-b278-9d5d2b4d52d9 req-1e4a550d-3db4-472c-a447-9fa94b2bd60b service nova] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Updated VIF entry in instance network info cache for port c9624a79-da6f-44aa-87fe-e5872f2e1d7d. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1410.160186] env[62510]: DEBUG nova.network.neutron [req-750a4ddd-1bc7-46e1-b278-9d5d2b4d52d9 req-1e4a550d-3db4-472c-a447-9fa94b2bd60b service nova] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Updating instance_info_cache with network_info: [{"id": "c9624a79-da6f-44aa-87fe-e5872f2e1d7d", "address": "fa:16:3e:fc:31:a0", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.155", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9624a79-da", "ovs_interfaceid": "c9624a79-da6f-44aa-87fe-e5872f2e1d7d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1410.161278] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Acquiring lock "refresh_cache-612e95d6-28ef-4c9a-b5d9-fd83122bfa44" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1410.161362] env[62510]: DEBUG oslo_concurrency.lockutils [None 
req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Acquired lock "refresh_cache-612e95d6-28ef-4c9a-b5d9-fd83122bfa44" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1410.164053] env[62510]: DEBUG nova.network.neutron [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1410.228019] env[62510]: DEBUG oslo_vmware.api [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Task: {'id': task-1768224, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.235696] env[62510]: DEBUG nova.compute.manager [None req-eb7dfd75-c085-45d6-b1d4-5e00faf4dee3 tempest-ServerDiagnosticsV248Test-363593752 tempest-ServerDiagnosticsV248Test-363593752-project-admin] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1410.235696] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c08f993f-ce6d-4025-afab-96bdc496bcb1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.243660] env[62510]: INFO nova.compute.manager [None req-eb7dfd75-c085-45d6-b1d4-5e00faf4dee3 tempest-ServerDiagnosticsV248Test-363593752 tempest-ServerDiagnosticsV248Test-363593752-project-admin] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Retrieving diagnostics [ 1410.248019] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9fd60dd-b280-4a33-ac5c-9e936657370f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.350188] env[62510]: DEBUG oslo_vmware.api [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': task-1768225, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.444461] env[62510]: DEBUG nova.compute.utils [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1410.446697] env[62510]: DEBUG nova.compute.manager [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1410.450802] env[62510]: DEBUG nova.network.neutron [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1410.476074] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5159202f-b352-4834-94eb-38e3a078dec5 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Acquiring lock "3266d254-4a75-4fd3-b4e7-ebeb86467cbe" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1410.476351] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5159202f-b352-4834-94eb-38e3a078dec5 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Lock "3266d254-4a75-4fd3-b4e7-ebeb86467cbe" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1410.476560] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5159202f-b352-4834-94eb-38e3a078dec5 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Acquiring lock "3266d254-4a75-4fd3-b4e7-ebeb86467cbe-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1410.476744] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5159202f-b352-4834-94eb-38e3a078dec5 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Lock "3266d254-4a75-4fd3-b4e7-ebeb86467cbe-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1410.476912] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5159202f-b352-4834-94eb-38e3a078dec5 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Lock "3266d254-4a75-4fd3-b4e7-ebeb86467cbe-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1410.479295] env[62510]: INFO nova.compute.manager [None req-5159202f-b352-4834-94eb-38e3a078dec5 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Terminating instance [ 1410.510708] env[62510]: DEBUG oslo_vmware.api [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768223, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.731977} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.511063] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] c7d875ee-2b9c-48e4-9bf9-f7602e75ec62/c7d875ee-2b9c-48e4-9bf9-f7602e75ec62.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1410.511334] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1410.511973] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d5f6c868-f102-44a8-ab00-2c2246336c58 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.540636] env[62510]: DEBUG oslo_vmware.api [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Waiting for the task: (returnval){ [ 1410.540636] env[62510]: value = "task-1768226" [ 1410.540636] env[62510]: _type = "Task" [ 1410.540636] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.556310] env[62510]: DEBUG oslo_vmware.api [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768226, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.611027] env[62510]: DEBUG nova.policy [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '97a7f1ca55d549a3985e95b6bbc665f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '94a46473611d4b22be7c66c909d1b348', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1410.664250] env[62510]: DEBUG oslo_concurrency.lockutils [req-750a4ddd-1bc7-46e1-b278-9d5d2b4d52d9 req-1e4a550d-3db4-472c-a447-9fa94b2bd60b service nova] Releasing lock "refresh_cache-585784c5-b56a-435d-8b22-53bc5cb39b25" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1410.664686] env[62510]: DEBUG nova.compute.manager [req-750a4ddd-1bc7-46e1-b278-9d5d2b4d52d9 req-1e4a550d-3db4-472c-a447-9fa94b2bd60b service nova] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Received event network-vif-plugged-7a0f3d5f-4630-470a-9084-d1e05b4f306e {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1410.664971] env[62510]: DEBUG oslo_concurrency.lockutils [req-750a4ddd-1bc7-46e1-b278-9d5d2b4d52d9 req-1e4a550d-3db4-472c-a447-9fa94b2bd60b service nova] Acquiring lock "c7d875ee-2b9c-48e4-9bf9-f7602e75ec62-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1410.665220] env[62510]: DEBUG oslo_concurrency.lockutils [req-750a4ddd-1bc7-46e1-b278-9d5d2b4d52d9 req-1e4a550d-3db4-472c-a447-9fa94b2bd60b service nova] Lock "c7d875ee-2b9c-48e4-9bf9-f7602e75ec62-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1410.665391] env[62510]: DEBUG oslo_concurrency.lockutils [req-750a4ddd-1bc7-46e1-b278-9d5d2b4d52d9 req-1e4a550d-3db4-472c-a447-9fa94b2bd60b service nova] Lock "c7d875ee-2b9c-48e4-9bf9-f7602e75ec62-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1410.665590] env[62510]: DEBUG nova.compute.manager [req-750a4ddd-1bc7-46e1-b278-9d5d2b4d52d9 req-1e4a550d-3db4-472c-a447-9fa94b2bd60b service nova] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] No waiting events found dispatching network-vif-plugged-7a0f3d5f-4630-470a-9084-d1e05b4f306e {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1410.665728] env[62510]: WARNING nova.compute.manager [req-750a4ddd-1bc7-46e1-b278-9d5d2b4d52d9 req-1e4a550d-3db4-472c-a447-9fa94b2bd60b service nova] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Received unexpected event network-vif-plugged-7a0f3d5f-4630-470a-9084-d1e05b4f306e for instance with vm_state building and task_state spawning. 
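The lock traffic in this stretch (the instance-level locks around do_terminate_instance, the per-instance "-events" locks, and the refresh_cache-* locks) is emitted by oslo.concurrency's lockutils, which logs every acquire and release together with the wait and hold times. The following is an illustrative sketch only, not Nova's actual code, of the two idioms that produce these lines, assuming an in-process (non-external) lock and made-up names:

    from oslo_concurrency import lockutils

    # Decorator form: serializes callers on one named lock and emits the
    # '... acquired by ... :: waited' / '"released" ... :: held' DEBUG lines.
    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid):
        return instance_uuid

    # Context-manager form, which emits the 'Acquiring lock' / 'Acquired lock'
    # / 'Releasing lock' lines seen around the refresh_cache-<uuid> sections.
    with lockutils.lock('refresh_cache-1234'):
        pass  # e.g. refresh the instance network info cache here

    claim_resources('1234')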
[ 1410.665896] env[62510]: DEBUG nova.compute.manager [req-750a4ddd-1bc7-46e1-b278-9d5d2b4d52d9 req-1e4a550d-3db4-472c-a447-9fa94b2bd60b service nova] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Received event network-changed-7a0f3d5f-4630-470a-9084-d1e05b4f306e {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1410.666100] env[62510]: DEBUG nova.compute.manager [req-750a4ddd-1bc7-46e1-b278-9d5d2b4d52d9 req-1e4a550d-3db4-472c-a447-9fa94b2bd60b service nova] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Refreshing instance network info cache due to event network-changed-7a0f3d5f-4630-470a-9084-d1e05b4f306e. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1410.666387] env[62510]: DEBUG oslo_concurrency.lockutils [req-750a4ddd-1bc7-46e1-b278-9d5d2b4d52d9 req-1e4a550d-3db4-472c-a447-9fa94b2bd60b service nova] Acquiring lock "refresh_cache-c7d875ee-2b9c-48e4-9bf9-f7602e75ec62" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1410.666795] env[62510]: DEBUG oslo_concurrency.lockutils [req-750a4ddd-1bc7-46e1-b278-9d5d2b4d52d9 req-1e4a550d-3db4-472c-a447-9fa94b2bd60b service nova] Acquired lock "refresh_cache-c7d875ee-2b9c-48e4-9bf9-f7602e75ec62" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1410.667016] env[62510]: DEBUG nova.network.neutron [req-750a4ddd-1bc7-46e1-b278-9d5d2b4d52d9 req-1e4a550d-3db4-472c-a447-9fa94b2bd60b service nova] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Refreshing network info cache for port 7a0f3d5f-4630-470a-9084-d1e05b4f306e {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1410.723210] env[62510]: DEBUG oslo_vmware.api [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Task: {'id': task-1768224, 'name': PowerOnVM_Task, 'duration_secs': 0.722464} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.723561] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1410.723932] env[62510]: INFO nova.compute.manager [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Took 10.05 seconds to spawn the instance on the hypervisor. 
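Every "Waiting for the task: ... to complete", "progress is N%" and "completed successfully" line above comes from oslo.vmware's task-polling helper: the driver invokes a vSphere task method through the API session and then blocks in wait_for_task, which polls the task until it finishes. A minimal sketch of that calling pattern follows, assuming a reachable vCenter; the endpoint, credentials, and the way the VM reference is chosen are placeholders, not values from this run:

    from oslo_vmware import api, vim_util

    # Placeholder connection details, not the vCenter used in this log.
    session = api.VMwareAPISession('vc.example.org', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # Pick some VirtualMachine managed object (selection logic is illustrative).
    result = session.invoke_api(vim_util, 'get_objects', session.vim,
                                'VirtualMachine', 100)
    vm_ref = result.objects[0].obj

    # Start a vSphere task and block until it completes; wait_for_task() is
    # what drives the periodic 'progress is N%' polling messages.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    task_info = session.wait_for_task(task)
    print(task_info.state)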
[ 1410.724138] env[62510]: DEBUG nova.compute.manager [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1410.728175] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b970d8de-5647-464e-9f1a-41d0d4b59003 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.769664] env[62510]: DEBUG nova.network.neutron [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1410.851353] env[62510]: DEBUG oslo_vmware.api [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': task-1768225, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.888885} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.851353] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] eb840df4-edc1-44cb-84c9-f31b7b56b6bd/eb840df4-edc1-44cb-84c9-f31b7b56b6bd.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1410.852053] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1410.852137] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9d6c5a33-e673-41cc-ae30-e6f4a01908d9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.864091] env[62510]: DEBUG oslo_vmware.api [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Waiting for the task: (returnval){ [ 1410.864091] env[62510]: value = "task-1768227" [ 1410.864091] env[62510]: _type = "Task" [ 1410.864091] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.875297] env[62510]: DEBUG oslo_vmware.api [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': task-1768227, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.954398] env[62510]: DEBUG nova.compute.manager [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1410.978545] env[62510]: DEBUG nova.compute.manager [req-a8413a52-38e3-4b9f-a12c-43d425cd613e req-25f796e3-5a8d-417a-b037-4484a15eb461 service nova] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Received event network-vif-plugged-55cae8a5-e495-4d62-a2c0-b2effaf346ec {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1410.981799] env[62510]: DEBUG oslo_concurrency.lockutils [req-a8413a52-38e3-4b9f-a12c-43d425cd613e req-25f796e3-5a8d-417a-b037-4484a15eb461 service nova] Acquiring lock "eb840df4-edc1-44cb-84c9-f31b7b56b6bd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1410.981799] env[62510]: DEBUG oslo_concurrency.lockutils [req-a8413a52-38e3-4b9f-a12c-43d425cd613e req-25f796e3-5a8d-417a-b037-4484a15eb461 service nova] Lock "eb840df4-edc1-44cb-84c9-f31b7b56b6bd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.003s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1410.981799] env[62510]: DEBUG oslo_concurrency.lockutils [req-a8413a52-38e3-4b9f-a12c-43d425cd613e req-25f796e3-5a8d-417a-b037-4484a15eb461 service nova] Lock "eb840df4-edc1-44cb-84c9-f31b7b56b6bd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1410.981925] env[62510]: DEBUG nova.compute.manager [req-a8413a52-38e3-4b9f-a12c-43d425cd613e req-25f796e3-5a8d-417a-b037-4484a15eb461 service nova] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] No waiting events found dispatching network-vif-plugged-55cae8a5-e495-4d62-a2c0-b2effaf346ec {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1410.982083] env[62510]: WARNING nova.compute.manager [req-a8413a52-38e3-4b9f-a12c-43d425cd613e req-25f796e3-5a8d-417a-b037-4484a15eb461 service nova] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Received unexpected event network-vif-plugged-55cae8a5-e495-4d62-a2c0-b2effaf346ec for instance with vm_state building and task_state spawning. [ 1410.982697] env[62510]: DEBUG nova.compute.manager [req-a8413a52-38e3-4b9f-a12c-43d425cd613e req-25f796e3-5a8d-417a-b037-4484a15eb461 service nova] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Received event network-changed-55cae8a5-e495-4d62-a2c0-b2effaf346ec {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1410.982697] env[62510]: DEBUG nova.compute.manager [req-a8413a52-38e3-4b9f-a12c-43d425cd613e req-25f796e3-5a8d-417a-b037-4484a15eb461 service nova] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Refreshing instance network info cache due to event network-changed-55cae8a5-e495-4d62-a2c0-b2effaf346ec. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1410.982697] env[62510]: DEBUG oslo_concurrency.lockutils [req-a8413a52-38e3-4b9f-a12c-43d425cd613e req-25f796e3-5a8d-417a-b037-4484a15eb461 service nova] Acquiring lock "refresh_cache-eb840df4-edc1-44cb-84c9-f31b7b56b6bd" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1410.982917] env[62510]: DEBUG oslo_concurrency.lockutils [req-a8413a52-38e3-4b9f-a12c-43d425cd613e req-25f796e3-5a8d-417a-b037-4484a15eb461 service nova] Acquired lock "refresh_cache-eb840df4-edc1-44cb-84c9-f31b7b56b6bd" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1410.982996] env[62510]: DEBUG nova.network.neutron [req-a8413a52-38e3-4b9f-a12c-43d425cd613e req-25f796e3-5a8d-417a-b037-4484a15eb461 service nova] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Refreshing network info cache for port 55cae8a5-e495-4d62-a2c0-b2effaf346ec {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1410.985337] env[62510]: DEBUG nova.compute.manager [None req-5159202f-b352-4834-94eb-38e3a078dec5 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1410.985525] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5159202f-b352-4834-94eb-38e3a078dec5 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1410.987944] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b416ae9b-e779-4aba-a4e1-b26cbc6f2c77 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.003218] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5159202f-b352-4834-94eb-38e3a078dec5 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1411.003514] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-89f07dce-029d-4605-bf7f-7db2b738506d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.017587] env[62510]: DEBUG oslo_vmware.api [None req-5159202f-b352-4834-94eb-38e3a078dec5 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Waiting for the task: (returnval){ [ 1411.017587] env[62510]: value = "task-1768228" [ 1411.017587] env[62510]: _type = "Task" [ 1411.017587] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.030522] env[62510]: DEBUG oslo_vmware.api [None req-5159202f-b352-4834-94eb-38e3a078dec5 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Task: {'id': task-1768228, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.056343] env[62510]: DEBUG oslo_vmware.api [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768226, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.110281} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.059355] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1411.062620] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffe0dcee-4031-4f07-92da-d05f906ea9f7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.097965] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Reconfiguring VM instance instance-00000008 to attach disk [datastore1] c7d875ee-2b9c-48e4-9bf9-f7602e75ec62/c7d875ee-2b9c-48e4-9bf9-f7602e75ec62.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1411.102769] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b4651ade-91de-42cd-8bd9-8fb6e026372d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.126049] env[62510]: DEBUG oslo_vmware.api [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Waiting for the task: (returnval){ [ 1411.126049] env[62510]: value = "task-1768229" [ 1411.126049] env[62510]: _type = "Task" [ 1411.126049] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.148135] env[62510]: DEBUG oslo_vmware.api [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768229, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.255955] env[62510]: INFO nova.compute.manager [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Took 24.00 seconds to build instance. [ 1411.380614] env[62510]: DEBUG oslo_vmware.api [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': task-1768227, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.294949} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.383661] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1411.385339] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fd1ee14-949d-4fc6-a38d-9661aefd5a5c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.420575] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Reconfiguring VM instance instance-00000005 to attach disk [datastore1] eb840df4-edc1-44cb-84c9-f31b7b56b6bd/eb840df4-edc1-44cb-84c9-f31b7b56b6bd.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1411.426095] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0d0a2b6c-5e62-4f26-a921-41c11cef8002 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.455724] env[62510]: DEBUG oslo_vmware.api [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Waiting for the task: (returnval){ [ 1411.455724] env[62510]: value = "task-1768230" [ 1411.455724] env[62510]: _type = "Task" [ 1411.455724] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.472393] env[62510]: DEBUG oslo_vmware.api [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': task-1768230, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.508785] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf011524-a676-459f-956f-f0caf871bca4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.525717] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3e79281-ba08-4930-ba69-4040c3c0b223 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.570408] env[62510]: DEBUG oslo_vmware.api [None req-5159202f-b352-4834-94eb-38e3a078dec5 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Task: {'id': task-1768228, 'name': PowerOffVM_Task, 'duration_secs': 0.238019} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.573842] env[62510]: DEBUG nova.network.neutron [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Updating instance_info_cache with network_info: [{"id": "f3011c4d-9d43-4939-9157-df0532a51861", "address": "fa:16:3e:33:a9:3e", "network": {"id": "1e2d2394-0caf-483c-8fdd-819cbb3d155f", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-79341046-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "44fbb61ad5364e0cb30d884cf96fe671", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92233552-2c0c-416e-9bf3-bfcca8eda2dc", "external-id": "nsx-vlan-transportzone-251", "segmentation_id": 251, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3011c4d-9d", "ovs_interfaceid": "f3011c4d-9d43-4939-9157-df0532a51861", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1411.575804] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b88962fd-5816-4510-abac-9942f7fafcee {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.581914] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5159202f-b352-4834-94eb-38e3a078dec5 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1411.582113] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5159202f-b352-4834-94eb-38e3a078dec5 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1411.587022] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-920f54d5-976d-41ad-8500-23e6d5886aa2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.592344] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11342d7b-4d18-472a-b824-286ae81d002b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.604296] env[62510]: DEBUG nova.network.neutron [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Successfully updated port: 6faa955b-378f-4f0d-9181-22e9295cf131 {{(pid=62510) _update_port 
/opt/stack/nova/nova/network/neutron.py:586}} [ 1411.616666] env[62510]: DEBUG nova.compute.provider_tree [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1411.638104] env[62510]: DEBUG oslo_vmware.api [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768229, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.680557] env[62510]: INFO nova.compute.manager [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Rebuilding instance [ 1411.683930] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquiring lock "e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1411.684226] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1411.693694] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5159202f-b352-4834-94eb-38e3a078dec5 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1411.694285] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5159202f-b352-4834-94eb-38e3a078dec5 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1411.694500] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-5159202f-b352-4834-94eb-38e3a078dec5 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Deleting the datastore file [datastore1] 3266d254-4a75-4fd3-b4e7-ebeb86467cbe {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1411.694796] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-33bbff28-0056-4c08-97d0-658173acf1cd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.705163] env[62510]: DEBUG oslo_vmware.api [None req-5159202f-b352-4834-94eb-38e3a078dec5 tempest-ImagesNegativeTestJSON-1334371676 
tempest-ImagesNegativeTestJSON-1334371676-project-member] Waiting for the task: (returnval){ [ 1411.705163] env[62510]: value = "task-1768232" [ 1411.705163] env[62510]: _type = "Task" [ 1411.705163] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.724816] env[62510]: DEBUG oslo_vmware.api [None req-5159202f-b352-4834-94eb-38e3a078dec5 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Task: {'id': task-1768232, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.750333] env[62510]: DEBUG nova.compute.manager [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1411.750333] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61c06b12-7909-4002-b177-21673d624e7e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.766561] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8fc58d78-7208-4a49-8138-f8e9db540a85 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Lock "585784c5-b56a-435d-8b22-53bc5cb39b25" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.520s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1411.855268] env[62510]: DEBUG nova.network.neutron [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Successfully created port: f4736e99-c658-4d4e-ace8-a3b4552f43bf {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1411.937231] env[62510]: DEBUG nova.network.neutron [req-750a4ddd-1bc7-46e1-b278-9d5d2b4d52d9 req-1e4a550d-3db4-472c-a447-9fa94b2bd60b service nova] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Updated VIF entry in instance network info cache for port 7a0f3d5f-4630-470a-9084-d1e05b4f306e. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1411.938098] env[62510]: DEBUG nova.network.neutron [req-750a4ddd-1bc7-46e1-b278-9d5d2b4d52d9 req-1e4a550d-3db4-472c-a447-9fa94b2bd60b service nova] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Updating instance_info_cache with network_info: [{"id": "7a0f3d5f-4630-470a-9084-d1e05b4f306e", "address": "fa:16:3e:85:5f:26", "network": {"id": "eb00b6bc-924a-4070-bdac-d34d8e726329", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1974390299-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "c852b1f21b054fd0b6961685dcf528f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19671de9-8b5b-4710-adc3-7419f3c0f171", "external-id": "nsx-vlan-transportzone-421", "segmentation_id": 421, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a0f3d5f-46", "ovs_interfaceid": "7a0f3d5f-4630-470a-9084-d1e05b4f306e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1411.966458] env[62510]: DEBUG nova.compute.manager [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1411.974980] env[62510]: DEBUG oslo_vmware.api [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': task-1768230, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.002143] env[62510]: DEBUG nova.virt.hardware [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1412.002244] env[62510]: DEBUG nova.virt.hardware [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1412.002379] env[62510]: DEBUG nova.virt.hardware [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1412.002555] env[62510]: DEBUG nova.virt.hardware [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1412.002721] env[62510]: DEBUG nova.virt.hardware [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1412.002872] env[62510]: DEBUG nova.virt.hardware [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1412.003920] env[62510]: DEBUG nova.virt.hardware [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1412.004208] env[62510]: DEBUG nova.virt.hardware [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1412.004465] env[62510]: DEBUG nova.virt.hardware [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 
tempest-ServersTestJSON-938961669-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1412.004583] env[62510]: DEBUG nova.virt.hardware [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1412.004748] env[62510]: DEBUG nova.virt.hardware [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1412.005690] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d55e319-668a-4cd9-9c1e-18b2e84825f9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.017066] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5b9f99d-212d-44e5-b182-81f09329e671 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.083591] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Releasing lock "refresh_cache-612e95d6-28ef-4c9a-b5d9-fd83122bfa44" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1412.083950] env[62510]: DEBUG nova.compute.manager [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Instance network_info: |[{"id": "f3011c4d-9d43-4939-9157-df0532a51861", "address": "fa:16:3e:33:a9:3e", "network": {"id": "1e2d2394-0caf-483c-8fdd-819cbb3d155f", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-79341046-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "44fbb61ad5364e0cb30d884cf96fe671", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92233552-2c0c-416e-9bf3-bfcca8eda2dc", "external-id": "nsx-vlan-transportzone-251", "segmentation_id": 251, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3011c4d-9d", "ovs_interfaceid": "f3011c4d-9d43-4939-9157-df0532a51861", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1412.084407] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] 
Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:33:a9:3e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92233552-2c0c-416e-9bf3-bfcca8eda2dc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f3011c4d-9d43-4939-9157-df0532a51861', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1412.092333] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Creating folder: Project (44fbb61ad5364e0cb30d884cf96fe671). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1412.093122] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c90e0e6b-19a8-46ec-9ad0-800159d7d25e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.108297] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Created folder: Project (44fbb61ad5364e0cb30d884cf96fe671) in parent group-v367197. [ 1412.108493] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Creating folder: Instances. Parent ref: group-v367222. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1412.108763] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-27aa2c19-5ba9-46d9-9e27-f53c17a1e5bf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.119697] env[62510]: DEBUG oslo_concurrency.lockutils [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Acquiring lock "refresh_cache-75e06a24-b96c-4a42-bc2d-b0b960e3301a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1412.119871] env[62510]: DEBUG oslo_concurrency.lockutils [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Acquired lock "refresh_cache-75e06a24-b96c-4a42-bc2d-b0b960e3301a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1412.120045] env[62510]: DEBUG nova.network.neutron [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1412.125374] env[62510]: DEBUG nova.scheduler.client.report [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1412.129848] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Created folder: Instances in parent group-v367222. [ 1412.130108] env[62510]: DEBUG oslo.service.loopingcall [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1412.130575] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1412.135233] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0bc84e39-5d16-4a12-a7d7-ca2aa71dfc58 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.174054] env[62510]: DEBUG oslo_vmware.api [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768229, 'name': ReconfigVM_Task, 'duration_secs': 0.804048} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.174627] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Reconfigured VM instance instance-00000008 to attach disk [datastore1] c7d875ee-2b9c-48e4-9bf9-f7602e75ec62/c7d875ee-2b9c-48e4-9bf9-f7602e75ec62.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1412.175492] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1412.175492] env[62510]: value = "task-1768235" [ 1412.175492] env[62510]: _type = "Task" [ 1412.175492] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.175684] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8323ba31-a387-4e92-b6ed-153f50446b46 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.194636] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768235, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.196652] env[62510]: DEBUG oslo_vmware.api [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Waiting for the task: (returnval){ [ 1412.196652] env[62510]: value = "task-1768236" [ 1412.196652] env[62510]: _type = "Task" [ 1412.196652] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.208078] env[62510]: DEBUG oslo_vmware.api [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768236, 'name': Rename_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.230732] env[62510]: DEBUG oslo_vmware.api [None req-5159202f-b352-4834-94eb-38e3a078dec5 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Task: {'id': task-1768232, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.171168} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.231327] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-5159202f-b352-4834-94eb-38e3a078dec5 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1412.231717] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5159202f-b352-4834-94eb-38e3a078dec5 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1412.232229] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5159202f-b352-4834-94eb-38e3a078dec5 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1412.232598] env[62510]: INFO nova.compute.manager [None req-5159202f-b352-4834-94eb-38e3a078dec5 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Took 1.25 seconds to destroy the instance on the hypervisor. [ 1412.232991] env[62510]: DEBUG oslo.service.loopingcall [None req-5159202f-b352-4834-94eb-38e3a078dec5 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1412.233310] env[62510]: DEBUG nova.compute.manager [-] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1412.233530] env[62510]: DEBUG nova.network.neutron [-] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1412.276644] env[62510]: DEBUG nova.compute.manager [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1412.351925] env[62510]: DEBUG nova.network.neutron [req-a8413a52-38e3-4b9f-a12c-43d425cd613e req-25f796e3-5a8d-417a-b037-4484a15eb461 service nova] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Updated VIF entry in instance network info cache for port 55cae8a5-e495-4d62-a2c0-b2effaf346ec. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1412.352449] env[62510]: DEBUG nova.network.neutron [req-a8413a52-38e3-4b9f-a12c-43d425cd613e req-25f796e3-5a8d-417a-b037-4484a15eb461 service nova] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Updating instance_info_cache with network_info: [{"id": "ca5eb991-9338-4e3a-8dcc-322896c420df", "address": "fa:16:3e:83:59:76", "network": {"id": "1c6ee7c8-8ce3-4518-85af-ad2c2576ec57", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-436557069", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.19", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d33701c4eedd47268e1c8d16bd63de81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca5eb991-93", "ovs_interfaceid": "ca5eb991-9338-4e3a-8dcc-322896c420df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e83f36f6-e38c-49b4-b419-59f9030e6005", "address": "fa:16:3e:02:7b:22", "network": {"id": "a32391f7-e8f4-4878-855a-13562e648ddc", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-129592885", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.236", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "d33701c4eedd47268e1c8d16bd63de81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bff6c3a1-cc80-46ca-86c0-6dbb029edddb", "external-id": "nsx-vlan-transportzone-223", "segmentation_id": 223, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape83f36f6-e3", "ovs_interfaceid": "e83f36f6-e38c-49b4-b419-59f9030e6005", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "55cae8a5-e495-4d62-a2c0-b2effaf346ec", "address": "fa:16:3e:88:74:a7", "network": {"id": "1c6ee7c8-8ce3-4518-85af-ad2c2576ec57", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-436557069", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.86", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d33701c4eedd47268e1c8d16bd63de81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4728adca-2846-416a-91a3-deb898faf1f3", "external-id": "nsx-vlan-transportzone-823", "segmentation_id": 823, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55cae8a5-e4", "ovs_interfaceid": "55cae8a5-e495-4d62-a2c0-b2effaf346ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1412.441558] env[62510]: DEBUG oslo_concurrency.lockutils [req-750a4ddd-1bc7-46e1-b278-9d5d2b4d52d9 req-1e4a550d-3db4-472c-a447-9fa94b2bd60b service nova] Releasing lock "refresh_cache-c7d875ee-2b9c-48e4-9bf9-f7602e75ec62" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1412.470295] env[62510]: DEBUG oslo_vmware.api [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': task-1768230, 'name': ReconfigVM_Task, 'duration_secs': 0.63192} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.470684] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Reconfigured VM instance instance-00000005 to attach disk [datastore1] eb840df4-edc1-44cb-84c9-f31b7b56b6bd/eb840df4-edc1-44cb-84c9-f31b7b56b6bd.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1412.471428] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ecb69967-25f5-4acd-9554-32fa7d5ac79d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.479641] env[62510]: DEBUG oslo_vmware.api [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Waiting for the task: (returnval){ [ 1412.479641] env[62510]: value = "task-1768237" [ 1412.479641] env[62510]: _type = "Task" [ 1412.479641] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.490278] env[62510]: DEBUG oslo_vmware.api [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': task-1768237, 'name': Rename_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.633033] env[62510]: DEBUG oslo_concurrency.lockutils [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.699s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1412.633033] env[62510]: DEBUG nova.compute.manager [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1412.642737] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.973s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1412.644862] env[62510]: INFO nova.compute.claims [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1412.689993] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768235, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.709895] env[62510]: DEBUG oslo_vmware.api [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768236, 'name': Rename_Task, 'duration_secs': 0.184215} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.710395] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1412.710613] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e5d3a47e-a64f-459c-afd0-d2f452468e19 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.719081] env[62510]: DEBUG oslo_vmware.api [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Waiting for the task: (returnval){ [ 1412.719081] env[62510]: value = "task-1768238" [ 1412.719081] env[62510]: _type = "Task" [ 1412.719081] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.728806] env[62510]: DEBUG oslo_vmware.api [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768238, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.767784] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1412.768176] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ea09315b-ce67-4f57-9740-a507e8203b55 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.776459] env[62510]: DEBUG oslo_vmware.api [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1412.776459] env[62510]: value = "task-1768239" [ 1412.776459] env[62510]: _type = "Task" [ 1412.776459] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.793712] env[62510]: DEBUG oslo_vmware.api [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768239, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.807688] env[62510]: DEBUG oslo_concurrency.lockutils [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1412.855667] env[62510]: DEBUG oslo_concurrency.lockutils [req-a8413a52-38e3-4b9f-a12c-43d425cd613e req-25f796e3-5a8d-417a-b037-4484a15eb461 service nova] Releasing lock "refresh_cache-eb840df4-edc1-44cb-84c9-f31b7b56b6bd" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1412.856040] env[62510]: DEBUG nova.compute.manager [req-a8413a52-38e3-4b9f-a12c-43d425cd613e req-25f796e3-5a8d-417a-b037-4484a15eb461 service nova] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Received event network-vif-deleted-b789c474-95af-4b6c-930a-2ce797a579f6 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1412.897851] env[62510]: DEBUG nova.network.neutron [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Instance cache missing network info. 
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1412.999466] env[62510]: DEBUG oslo_vmware.api [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': task-1768237, 'name': Rename_Task, 'duration_secs': 0.203191} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.000822] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1413.001279] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2a5cd2a2-ec48-468f-b60b-afe81414a43c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.013405] env[62510]: DEBUG oslo_vmware.api [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Waiting for the task: (returnval){ [ 1413.013405] env[62510]: value = "task-1768240" [ 1413.013405] env[62510]: _type = "Task" [ 1413.013405] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.025636] env[62510]: DEBUG oslo_vmware.api [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': task-1768240, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.150116] env[62510]: DEBUG nova.compute.utils [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1413.157466] env[62510]: DEBUG nova.compute.manager [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1413.157696] env[62510]: DEBUG nova.network.neutron [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1413.194512] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768235, 'name': CreateVM_Task, 'duration_secs': 0.538521} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.194761] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1413.195697] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1413.196151] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1413.196516] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1413.196808] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-936be7d8-a5a6-40e5-8d95-d09093dd7e6f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.207368] env[62510]: DEBUG oslo_vmware.api [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Waiting for the task: (returnval){ [ 1413.207368] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52714c9f-947e-c14f-4d7f-6e34642daf99" [ 1413.207368] env[62510]: _type = "Task" [ 1413.207368] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.218119] env[62510]: DEBUG oslo_vmware.api [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52714c9f-947e-c14f-4d7f-6e34642daf99, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.232609] env[62510]: DEBUG oslo_vmware.api [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768238, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.242082] env[62510]: DEBUG nova.policy [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dcc6b50d1d8f49a9b9017b9532696221', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c852b1f21b054fd0b6961685dcf528f5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1413.289785] env[62510]: DEBUG oslo_vmware.api [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768239, 'name': PowerOffVM_Task, 'duration_secs': 0.255858} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.290157] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1413.290456] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1413.292903] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68951d76-16da-4df2-b257-8026b83e25e5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.301655] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1413.301842] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-73141d3c-0d44-4156-a105-400d50169e1b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.326359] env[62510]: DEBUG nova.network.neutron [-] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1413.337129] env[62510]: DEBUG nova.network.neutron [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Updating instance_info_cache with network_info: [{"id": "6faa955b-378f-4f0d-9181-22e9295cf131", "address": "fa:16:3e:52:47:b2", "network": {"id": "eb00b6bc-924a-4070-bdac-d34d8e726329", 
"bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1974390299-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "c852b1f21b054fd0b6961685dcf528f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19671de9-8b5b-4710-adc3-7419f3c0f171", "external-id": "nsx-vlan-transportzone-421", "segmentation_id": 421, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6faa955b-37", "ovs_interfaceid": "6faa955b-378f-4f0d-9181-22e9295cf131", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1413.429235] env[62510]: DEBUG nova.compute.manager [req-cf49e077-d219-4548-8462-9e53ff17cd94 req-aa92a41b-64d2-4800-800a-a8dc7437d0e6 service nova] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Received event network-vif-plugged-f3011c4d-9d43-4939-9157-df0532a51861 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1413.429235] env[62510]: DEBUG oslo_concurrency.lockutils [req-cf49e077-d219-4548-8462-9e53ff17cd94 req-aa92a41b-64d2-4800-800a-a8dc7437d0e6 service nova] Acquiring lock "612e95d6-28ef-4c9a-b5d9-fd83122bfa44-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1413.429235] env[62510]: DEBUG oslo_concurrency.lockutils [req-cf49e077-d219-4548-8462-9e53ff17cd94 req-aa92a41b-64d2-4800-800a-a8dc7437d0e6 service nova] Lock "612e95d6-28ef-4c9a-b5d9-fd83122bfa44-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1413.429973] env[62510]: DEBUG oslo_concurrency.lockutils [req-cf49e077-d219-4548-8462-9e53ff17cd94 req-aa92a41b-64d2-4800-800a-a8dc7437d0e6 service nova] Lock "612e95d6-28ef-4c9a-b5d9-fd83122bfa44-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1413.430362] env[62510]: DEBUG nova.compute.manager [req-cf49e077-d219-4548-8462-9e53ff17cd94 req-aa92a41b-64d2-4800-800a-a8dc7437d0e6 service nova] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] No waiting events found dispatching network-vif-plugged-f3011c4d-9d43-4939-9157-df0532a51861 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1413.431050] env[62510]: WARNING nova.compute.manager [req-cf49e077-d219-4548-8462-9e53ff17cd94 req-aa92a41b-64d2-4800-800a-a8dc7437d0e6 service nova] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Received unexpected event network-vif-plugged-f3011c4d-9d43-4939-9157-df0532a51861 for instance with vm_state building and task_state spawning. 
[ 1413.431459] env[62510]: DEBUG nova.compute.manager [req-cf49e077-d219-4548-8462-9e53ff17cd94 req-aa92a41b-64d2-4800-800a-a8dc7437d0e6 service nova] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Received event network-changed-f3011c4d-9d43-4939-9157-df0532a51861 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1413.431743] env[62510]: DEBUG nova.compute.manager [req-cf49e077-d219-4548-8462-9e53ff17cd94 req-aa92a41b-64d2-4800-800a-a8dc7437d0e6 service nova] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Refreshing instance network info cache due to event network-changed-f3011c4d-9d43-4939-9157-df0532a51861. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1413.432073] env[62510]: DEBUG oslo_concurrency.lockutils [req-cf49e077-d219-4548-8462-9e53ff17cd94 req-aa92a41b-64d2-4800-800a-a8dc7437d0e6 service nova] Acquiring lock "refresh_cache-612e95d6-28ef-4c9a-b5d9-fd83122bfa44" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1413.432609] env[62510]: DEBUG oslo_concurrency.lockutils [req-cf49e077-d219-4548-8462-9e53ff17cd94 req-aa92a41b-64d2-4800-800a-a8dc7437d0e6 service nova] Acquired lock "refresh_cache-612e95d6-28ef-4c9a-b5d9-fd83122bfa44" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1413.434783] env[62510]: DEBUG nova.network.neutron [req-cf49e077-d219-4548-8462-9e53ff17cd94 req-aa92a41b-64d2-4800-800a-a8dc7437d0e6 service nova] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Refreshing network info cache for port f3011c4d-9d43-4939-9157-df0532a51861 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1413.525314] env[62510]: DEBUG oslo_vmware.api [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': task-1768240, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.658747] env[62510]: DEBUG nova.compute.manager [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1413.727600] env[62510]: DEBUG oslo_vmware.api [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52714c9f-947e-c14f-4d7f-6e34642daf99, 'name': SearchDatastore_Task, 'duration_secs': 0.013703} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.735410] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1413.735410] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1413.735410] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1413.735410] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1413.735410] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1413.735410] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-376727ee-9e0d-4111-9396-cf459d6b95c4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.749208] env[62510]: DEBUG oslo_vmware.api [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768238, 'name': PowerOnVM_Task, 'duration_secs': 0.702773} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.749753] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1413.750130] env[62510]: INFO nova.compute.manager [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Took 10.61 seconds to spawn the instance on the hypervisor. 
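The Invoking VirtualMachine.PowerOnVM_Task / "Task ... progress is N%" / "completed successfully" sequence above is oslo.vmware's asynchronous-task pattern: the SOAP call returns a Task managed-object reference and the session polls it until vCenter reports success. A minimal sketch of that pattern follows; the endpoint, credentials and `vm_ref` are placeholders, and the constructor argument names are as I recall them from oslo.vmware, so treat them as assumptions rather than Nova's driver code.

```python
# Sketch of the oslo.vmware task-polling pattern seen in this log.
from oslo_vmware import api

session = api.VMwareAPISession(
    host="vc1.example.test",               # placeholder vCenter endpoint
    server_username="user@vsphere.local",  # placeholder credentials
    server_password="secret",
    api_retry_count=10,
    task_poll_interval=0.5)                # drives the periodic "progress is N%" polling

vm_ref = ...  # VirtualMachine moref, obtained elsewhere (e.g. a PropertyCollector query)

# Start the asynchronous operation; the SOAP call itself returns immediately
# with a Task reference rather than waiting for the power-on to finish.
task_ref = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)

# Block until the task completes (or raise if vCenter reports an error),
# logging progress along the way like the _poll_task lines above.
session.wait_for_task(task_ref)
```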
[ 1413.750452] env[62510]: DEBUG nova.compute.manager [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1413.751404] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1be9701e-4911-44e3-8b8f-555c86ec95e1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.770021] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1413.771025] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1413.771267] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4e9eb76-c284-43a2-9eef-2894b103f583 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.783497] env[62510]: DEBUG oslo_vmware.api [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Waiting for the task: (returnval){ [ 1413.783497] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52a065bb-b6d8-0bf5-2782-f19bf90fc6bf" [ 1413.783497] env[62510]: _type = "Task" [ 1413.783497] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.794332] env[62510]: DEBUG oslo_vmware.api [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52a065bb-b6d8-0bf5-2782-f19bf90fc6bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.828629] env[62510]: INFO nova.compute.manager [-] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Took 1.59 seconds to deallocate network for instance. 
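The image-cache handling above locks and searches "[datastore1] devstack-image-cache_base/645af513-.../645af513-....vmdk" before reusing the cached image. Strings of that "[datastore] dir/file" form are built and parsed with oslo.vmware's datastore-path helper; the sketch below is editor-added and only illustrates that helper (attribute names are recalled from oslo.vmware and should be treated as assumptions), with the datastore name and image UUID taken from this log.

```python
# Sketch of composing/parsing the "[datastore1] devstack-image-cache_base/..."
# paths that appear in the log above.
from oslo_vmware.objects import datastore as ds_obj

image_id = "645af513-c243-4722-b631-714f21477ae6"  # cached image UUID from the log

# Compose "[datastore1] devstack-image-cache_base/<image>/<image>.vmdk"
cache_vmdk = ds_obj.DatastorePath(
    "datastore1", "devstack-image-cache_base", image_id, image_id + ".vmdk")
print(str(cache_vmdk))

# Parse the string form back into its datastore name and relative path.
parsed = ds_obj.DatastorePath.parse(str(cache_vmdk))
print(parsed.datastore)  # 'datastore1'
print(parsed.rel_path)   # 'devstack-image-cache_base/645af513-.../645af513-....vmdk'
```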
[ 1413.841723] env[62510]: DEBUG oslo_concurrency.lockutils [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Releasing lock "refresh_cache-75e06a24-b96c-4a42-bc2d-b0b960e3301a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1413.841723] env[62510]: DEBUG nova.compute.manager [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Instance network_info: |[{"id": "6faa955b-378f-4f0d-9181-22e9295cf131", "address": "fa:16:3e:52:47:b2", "network": {"id": "eb00b6bc-924a-4070-bdac-d34d8e726329", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1974390299-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "c852b1f21b054fd0b6961685dcf528f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19671de9-8b5b-4710-adc3-7419f3c0f171", "external-id": "nsx-vlan-transportzone-421", "segmentation_id": 421, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6faa955b-37", "ovs_interfaceid": "6faa955b-378f-4f0d-9181-22e9295cf131", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1413.841723] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:47:b2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '19671de9-8b5b-4710-adc3-7419f3c0f171', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6faa955b-378f-4f0d-9181-22e9295cf131', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1413.850825] env[62510]: DEBUG oslo.service.loopingcall [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1413.854562] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1413.855018] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d6cfbb60-762e-4877-bcb2-056064a46776 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.883878] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1413.883878] env[62510]: value = "task-1768242" [ 1413.883878] env[62510]: _type = "Task" [ 1413.883878] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.895676] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768242, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.027960] env[62510]: DEBUG nova.network.neutron [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Successfully created port: 210d5dee-24d1-4f38-b4b0-d1b78b6180ed {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1414.041394] env[62510]: DEBUG oslo_vmware.api [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': task-1768240, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.161196] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d2e843d-250a-4da7-88bd-b091e2d7f91e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.183180] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9329c0ff-467a-44cc-858a-da2d0d66eee6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.226374] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8ea431c-d9a6-4713-9fa4-8ddb9fdfb01d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.240473] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abf6f34b-396b-41f7-b149-a59301faa28f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.275404] env[62510]: DEBUG nova.compute.provider_tree [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1414.292767] env[62510]: DEBUG oslo_concurrency.lockutils [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquiring lock "7cc6d4a6-2765-44e7-b378-e213a562593d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1414.293372] env[62510]: DEBUG oslo_concurrency.lockutils [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "7cc6d4a6-2765-44e7-b378-e213a562593d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1414.297373] env[62510]: INFO nova.compute.manager [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Took 26.95 seconds to build instance. [ 1414.313443] env[62510]: DEBUG oslo_vmware.api [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52a065bb-b6d8-0bf5-2782-f19bf90fc6bf, 'name': SearchDatastore_Task, 'duration_secs': 0.020277} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.314582] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59c31199-04d0-415c-8222-016e89b94ecb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.325123] env[62510]: DEBUG oslo_vmware.api [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Waiting for the task: (returnval){ [ 1414.325123] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5210135a-a639-1210-e6f2-70a02d26fe01" [ 1414.325123] env[62510]: _type = "Task" [ 1414.325123] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.342117] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5159202f-b352-4834-94eb-38e3a078dec5 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1414.343522] env[62510]: DEBUG nova.compute.manager [None req-c00879f2-fe7a-45b1-8131-f464f5d772ac tempest-ServerDiagnosticsTest-1604419077 tempest-ServerDiagnosticsTest-1604419077-project-admin] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1414.344159] env[62510]: DEBUG oslo_vmware.api [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5210135a-a639-1210-e6f2-70a02d26fe01, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.345026] env[62510]: DEBUG nova.network.neutron [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Successfully updated port: f4736e99-c658-4d4e-ace8-a3b4552f43bf {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1414.346624] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25d3747d-e8fb-489e-ba88-03ffeba4274b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.360598] env[62510]: INFO nova.compute.manager [None req-c00879f2-fe7a-45b1-8131-f464f5d772ac tempest-ServerDiagnosticsTest-1604419077 tempest-ServerDiagnosticsTest-1604419077-project-admin] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Retrieving diagnostics [ 1414.360665] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6fa6e48-5df1-4a96-ab2c-cf44d6c1e8ca {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.398461] env[62510]: DEBUG nova.network.neutron [req-cf49e077-d219-4548-8462-9e53ff17cd94 req-aa92a41b-64d2-4800-800a-a8dc7437d0e6 service nova] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Updated VIF entry in instance network info cache for port f3011c4d-9d43-4939-9157-df0532a51861. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1414.398615] env[62510]: DEBUG nova.network.neutron [req-cf49e077-d219-4548-8462-9e53ff17cd94 req-aa92a41b-64d2-4800-800a-a8dc7437d0e6 service nova] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Updating instance_info_cache with network_info: [{"id": "f3011c4d-9d43-4939-9157-df0532a51861", "address": "fa:16:3e:33:a9:3e", "network": {"id": "1e2d2394-0caf-483c-8fdd-819cbb3d155f", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-79341046-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "44fbb61ad5364e0cb30d884cf96fe671", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92233552-2c0c-416e-9bf3-bfcca8eda2dc", "external-id": "nsx-vlan-transportzone-251", "segmentation_id": 251, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3011c4d-9d", "ovs_interfaceid": "f3011c4d-9d43-4939-9157-df0532a51861", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1414.410531] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768242, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.530139] env[62510]: DEBUG oslo_vmware.api [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': task-1768240, 'name': PowerOnVM_Task, 'duration_secs': 1.116029} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.530506] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1414.530646] env[62510]: INFO nova.compute.manager [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Took 18.83 seconds to spawn the instance on the hypervisor. [ 1414.530824] env[62510]: DEBUG nova.compute.manager [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1414.531873] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dc91f4d-9241-4179-a7d3-b422c56ae38c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.681010] env[62510]: DEBUG nova.compute.manager [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1414.713485] env[62510]: DEBUG nova.virt.hardware [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:41Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1414.715010] env[62510]: DEBUG nova.virt.hardware [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1414.715010] env[62510]: DEBUG nova.virt.hardware [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1414.715010] env[62510]: DEBUG nova.virt.hardware [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1414.715010] env[62510]: DEBUG nova.virt.hardware [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1414.715010] env[62510]: DEBUG nova.virt.hardware [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1414.715010] env[62510]: DEBUG nova.virt.hardware [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1414.715010] env[62510]: DEBUG nova.virt.hardware [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1414.715010] 
env[62510]: DEBUG nova.virt.hardware [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1414.715841] env[62510]: DEBUG nova.virt.hardware [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1414.715915] env[62510]: DEBUG nova.virt.hardware [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1414.716843] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1617152b-ed10-456b-a108-531c32a3f45d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.727125] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e516dcc-be3b-47b8-bed8-805548000115 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.791095] env[62510]: DEBUG nova.scheduler.client.report [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1414.804665] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5bff950d-1fb8-40da-b2a6-906e802143d2 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Lock "c7d875ee-2b9c-48e4-9bf9-f7602e75ec62" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.463s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1414.836179] env[62510]: DEBUG oslo_vmware.api [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5210135a-a639-1210-e6f2-70a02d26fe01, 'name': SearchDatastore_Task, 'duration_secs': 0.02075} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.836519] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1414.836814] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 612e95d6-28ef-4c9a-b5d9-fd83122bfa44/612e95d6-28ef-4c9a-b5d9-fd83122bfa44.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1414.837118] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8974e5f5-329b-462b-aa57-a066a9aec3c0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.845109] env[62510]: DEBUG oslo_vmware.api [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Waiting for the task: (returnval){ [ 1414.845109] env[62510]: value = "task-1768243" [ 1414.845109] env[62510]: _type = "Task" [ 1414.845109] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.853594] env[62510]: DEBUG oslo_concurrency.lockutils [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "refresh_cache-8ffa27e9-6a3b-48d1-aed4-c808089788d9" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1414.853749] env[62510]: DEBUG oslo_concurrency.lockutils [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquired lock "refresh_cache-8ffa27e9-6a3b-48d1-aed4-c808089788d9" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1414.853922] env[62510]: DEBUG nova.network.neutron [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1414.855424] env[62510]: DEBUG oslo_vmware.api [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Task: {'id': task-1768243, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.869594] env[62510]: DEBUG nova.compute.manager [req-5c95ddcf-cef9-475d-9ffe-23c5fe9ab43e req-8b4fd3ff-9bed-45ea-a2c6-f860eaa20405 service nova] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Received event network-vif-plugged-6faa955b-378f-4f0d-9181-22e9295cf131 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1414.869763] env[62510]: DEBUG oslo_concurrency.lockutils [req-5c95ddcf-cef9-475d-9ffe-23c5fe9ab43e req-8b4fd3ff-9bed-45ea-a2c6-f860eaa20405 service nova] Acquiring lock "75e06a24-b96c-4a42-bc2d-b0b960e3301a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1414.869881] env[62510]: DEBUG oslo_concurrency.lockutils [req-5c95ddcf-cef9-475d-9ffe-23c5fe9ab43e req-8b4fd3ff-9bed-45ea-a2c6-f860eaa20405 service nova] Lock "75e06a24-b96c-4a42-bc2d-b0b960e3301a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1414.870043] env[62510]: DEBUG oslo_concurrency.lockutils [req-5c95ddcf-cef9-475d-9ffe-23c5fe9ab43e req-8b4fd3ff-9bed-45ea-a2c6-f860eaa20405 service nova] Lock "75e06a24-b96c-4a42-bc2d-b0b960e3301a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1414.870198] env[62510]: DEBUG nova.compute.manager [req-5c95ddcf-cef9-475d-9ffe-23c5fe9ab43e req-8b4fd3ff-9bed-45ea-a2c6-f860eaa20405 service nova] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] No waiting events found dispatching network-vif-plugged-6faa955b-378f-4f0d-9181-22e9295cf131 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1414.870559] env[62510]: WARNING nova.compute.manager [req-5c95ddcf-cef9-475d-9ffe-23c5fe9ab43e req-8b4fd3ff-9bed-45ea-a2c6-f860eaa20405 service nova] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Received unexpected event network-vif-plugged-6faa955b-378f-4f0d-9181-22e9295cf131 for instance with vm_state building and task_state spawning. [ 1414.870559] env[62510]: DEBUG nova.compute.manager [req-5c95ddcf-cef9-475d-9ffe-23c5fe9ab43e req-8b4fd3ff-9bed-45ea-a2c6-f860eaa20405 service nova] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Received event network-changed-6faa955b-378f-4f0d-9181-22e9295cf131 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1414.870653] env[62510]: DEBUG nova.compute.manager [req-5c95ddcf-cef9-475d-9ffe-23c5fe9ab43e req-8b4fd3ff-9bed-45ea-a2c6-f860eaa20405 service nova] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Refreshing instance network info cache due to event network-changed-6faa955b-378f-4f0d-9181-22e9295cf131. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1414.870842] env[62510]: DEBUG oslo_concurrency.lockutils [req-5c95ddcf-cef9-475d-9ffe-23c5fe9ab43e req-8b4fd3ff-9bed-45ea-a2c6-f860eaa20405 service nova] Acquiring lock "refresh_cache-75e06a24-b96c-4a42-bc2d-b0b960e3301a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1414.871106] env[62510]: DEBUG oslo_concurrency.lockutils [req-5c95ddcf-cef9-475d-9ffe-23c5fe9ab43e req-8b4fd3ff-9bed-45ea-a2c6-f860eaa20405 service nova] Acquired lock "refresh_cache-75e06a24-b96c-4a42-bc2d-b0b960e3301a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1414.871295] env[62510]: DEBUG nova.network.neutron [req-5c95ddcf-cef9-475d-9ffe-23c5fe9ab43e req-8b4fd3ff-9bed-45ea-a2c6-f860eaa20405 service nova] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Refreshing network info cache for port 6faa955b-378f-4f0d-9181-22e9295cf131 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1414.904902] env[62510]: DEBUG oslo_concurrency.lockutils [req-cf49e077-d219-4548-8462-9e53ff17cd94 req-aa92a41b-64d2-4800-800a-a8dc7437d0e6 service nova] Releasing lock "refresh_cache-612e95d6-28ef-4c9a-b5d9-fd83122bfa44" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1414.905268] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768242, 'name': CreateVM_Task, 'duration_secs': 0.553849} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.905699] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1414.906662] env[62510]: DEBUG oslo_concurrency.lockutils [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1414.906662] env[62510]: DEBUG oslo_concurrency.lockutils [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1414.906907] env[62510]: DEBUG oslo_concurrency.lockutils [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1414.907084] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-611324d3-eff8-4cd2-9c04-2fdac7a8927b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.912097] env[62510]: DEBUG oslo_vmware.api [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f 
tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Waiting for the task: (returnval){ [ 1414.912097] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5248f235-a3b8-36a3-d7de-e68062a30b29" [ 1414.912097] env[62510]: _type = "Task" [ 1414.912097] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.920507] env[62510]: DEBUG oslo_vmware.api [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5248f235-a3b8-36a3-d7de-e68062a30b29, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.052159] env[62510]: INFO nova.compute.manager [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Took 28.55 seconds to build instance. [ 1415.297114] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.654s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1415.297793] env[62510]: DEBUG nova.compute.manager [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1415.301972] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.216s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1415.304320] env[62510]: INFO nova.compute.claims [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1415.307505] env[62510]: DEBUG nova.compute.manager [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1415.359843] env[62510]: DEBUG oslo_vmware.api [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Task: {'id': task-1768243, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.425798] env[62510]: DEBUG oslo_vmware.api [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5248f235-a3b8-36a3-d7de-e68062a30b29, 'name': SearchDatastore_Task, 'duration_secs': 0.019064} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.426127] env[62510]: DEBUG oslo_concurrency.lockutils [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1415.426377] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1415.426729] env[62510]: DEBUG oslo_concurrency.lockutils [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1415.426818] env[62510]: DEBUG oslo_concurrency.lockutils [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1415.427035] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1415.427444] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1d6b8fa5-e45b-4ffa-88c0-2c9511e0573d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.448178] env[62510]: DEBUG nova.network.neutron [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Instance cache missing network info. 
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1415.554959] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3fc05c1c-c7a7-4956-a3f1-36baeeb9cdde tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Lock "eb840df4-edc1-44cb-84c9-f31b7b56b6bd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.085s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1415.644092] env[62510]: DEBUG oslo_concurrency.lockutils [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Acquiring lock "d42295c9-2b0e-471e-9a87-1d7367de9588" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1415.644243] env[62510]: DEBUG oslo_concurrency.lockutils [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Lock "d42295c9-2b0e-471e-9a87-1d7367de9588" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1415.745902] env[62510]: DEBUG nova.network.neutron [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Updating instance_info_cache with network_info: [{"id": "f4736e99-c658-4d4e-ace8-a3b4552f43bf", "address": "fa:16:3e:11:18:84", "network": {"id": "22bd7136-e6e5-445f-8cd0-6cfe0341410c", "bridge": "br-int", "label": "tempest-ServersTestJSON-2034430291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "94a46473611d4b22be7c66c909d1b348", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4736e99-c6", "ovs_interfaceid": "f4736e99-c658-4d4e-ace8-a3b4552f43bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1415.751698] env[62510]: DEBUG nova.network.neutron [req-5c95ddcf-cef9-475d-9ffe-23c5fe9ab43e req-8b4fd3ff-9bed-45ea-a2c6-f860eaa20405 service nova] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Updated VIF entry in instance network info cache for port 6faa955b-378f-4f0d-9181-22e9295cf131. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1415.752049] env[62510]: DEBUG nova.network.neutron [req-5c95ddcf-cef9-475d-9ffe-23c5fe9ab43e req-8b4fd3ff-9bed-45ea-a2c6-f860eaa20405 service nova] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Updating instance_info_cache with network_info: [{"id": "6faa955b-378f-4f0d-9181-22e9295cf131", "address": "fa:16:3e:52:47:b2", "network": {"id": "eb00b6bc-924a-4070-bdac-d34d8e726329", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1974390299-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "c852b1f21b054fd0b6961685dcf528f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19671de9-8b5b-4710-adc3-7419f3c0f171", "external-id": "nsx-vlan-transportzone-421", "segmentation_id": 421, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6faa955b-37", "ovs_interfaceid": "6faa955b-378f-4f0d-9181-22e9295cf131", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1415.809558] env[62510]: DEBUG nova.compute.utils [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1415.811120] env[62510]: DEBUG nova.compute.manager [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1415.811370] env[62510]: DEBUG nova.network.neutron [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1415.839131] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1415.866603] env[62510]: DEBUG oslo_vmware.api [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Task: {'id': task-1768243, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.897818] env[62510]: DEBUG nova.policy [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6decc076b3da4d1b86c6aa73f1cf2674', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '86abf24d608d4c438161dc0b8335dea1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1416.059342] env[62510]: DEBUG nova.compute.manager [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1416.219477] env[62510]: DEBUG nova.network.neutron [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Successfully updated port: 210d5dee-24d1-4f38-b4b0-d1b78b6180ed {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1416.249089] env[62510]: DEBUG oslo_concurrency.lockutils [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Releasing lock "refresh_cache-8ffa27e9-6a3b-48d1-aed4-c808089788d9" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1416.249557] env[62510]: DEBUG nova.compute.manager [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Instance network_info: |[{"id": "f4736e99-c658-4d4e-ace8-a3b4552f43bf", "address": "fa:16:3e:11:18:84", "network": {"id": "22bd7136-e6e5-445f-8cd0-6cfe0341410c", "bridge": "br-int", "label": "tempest-ServersTestJSON-2034430291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "94a46473611d4b22be7c66c909d1b348", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4736e99-c6", "ovs_interfaceid": "f4736e99-c658-4d4e-ace8-a3b4552f43bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1416.250042] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None 
req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:11:18:84', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89ef02af-c508-432f-ae29-3a219701d584', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f4736e99-c658-4d4e-ace8-a3b4552f43bf', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1416.262825] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Creating folder: Project (94a46473611d4b22be7c66c909d1b348). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1416.263760] env[62510]: DEBUG oslo_concurrency.lockutils [req-5c95ddcf-cef9-475d-9ffe-23c5fe9ab43e req-8b4fd3ff-9bed-45ea-a2c6-f860eaa20405 service nova] Releasing lock "refresh_cache-75e06a24-b96c-4a42-bc2d-b0b960e3301a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1416.264393] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bea6d51a-f17c-467a-afe3-1a33da5b7646 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.282573] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Created folder: Project (94a46473611d4b22be7c66c909d1b348) in parent group-v367197. [ 1416.282573] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Creating folder: Instances. Parent ref: group-v367226. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1416.282573] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d477a2b8-97db-4048-88d1-93391bab4bcd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.295018] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Created folder: Instances in parent group-v367226. [ 1416.295296] env[62510]: DEBUG oslo.service.loopingcall [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1416.295540] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1416.295778] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-41a4bed2-5140-46e9-bbb7-fbdef3c83a5b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.319867] env[62510]: DEBUG nova.compute.manager [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1416.322032] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1416.322032] env[62510]: value = "task-1768246" [ 1416.322032] env[62510]: _type = "Task" [ 1416.322032] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.335437] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768246, 'name': CreateVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.360050] env[62510]: DEBUG oslo_vmware.api [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Task: {'id': task-1768243, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.474452] env[62510]: DEBUG nova.network.neutron [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Successfully created port: a6e31bab-0459-42fe-8756-d37cc3fa3e88 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1416.588476] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1416.716225] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59842600-0e43-43b9-919e-1378740d9c0d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.724527] env[62510]: DEBUG oslo_concurrency.lockutils [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Acquiring lock "refresh_cache-35a98028-0fc6-4e13-b50d-5dacf205dbe5" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1416.724779] env[62510]: DEBUG oslo_concurrency.lockutils [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] 
Acquired lock "refresh_cache-35a98028-0fc6-4e13-b50d-5dacf205dbe5" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1416.724922] env[62510]: DEBUG nova.network.neutron [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1416.731097] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8404ecd3-c046-44c2-948b-859d89b723e9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.764616] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11ebabfa-6165-41a4-8136-522026e3bed4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.772934] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96ef4f91-a536-4d3b-8358-422da454f818 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.791042] env[62510]: DEBUG nova.compute.provider_tree [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1416.837209] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768246, 'name': CreateVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.860937] env[62510]: DEBUG oslo_vmware.api [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Task: {'id': task-1768243, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.995774] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1416.996687] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1416.996936] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d91fa98-ebd8-4376-b0ba-c454ed0dddf5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.006538] env[62510]: DEBUG oslo_vmware.api [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Waiting for the task: (returnval){ [ 1417.006538] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52232703-974c-c467-cf66-a3b1dfaf2641" [ 1417.006538] env[62510]: _type = "Task" [ 1417.006538] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.013832] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1417.014811] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1417.014811] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Deleting the datastore file [datastore1] 5588650b-c450-489a-a456-3b580a5b9114 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1417.015644] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6850fd9f-eff6-4f1b-8c0e-1d2808e1413a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.022507] env[62510]: DEBUG oslo_vmware.api [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52232703-974c-c467-cf66-a3b1dfaf2641, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.030701] env[62510]: DEBUG oslo_vmware.api [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1417.030701] env[62510]: value = "task-1768247" [ 1417.030701] env[62510]: _type = "Task" [ 1417.030701] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.038393] env[62510]: DEBUG oslo_vmware.api [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768247, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.280931] env[62510]: DEBUG nova.network.neutron [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1417.297020] env[62510]: DEBUG nova.scheduler.client.report [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1417.334938] env[62510]: DEBUG nova.compute.manager [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1417.343209] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768246, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.367414] env[62510]: DEBUG oslo_vmware.api [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Task: {'id': task-1768243, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.364067} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.367918] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 612e95d6-28ef-4c9a-b5d9-fd83122bfa44/612e95d6-28ef-4c9a-b5d9-fd83122bfa44.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1417.368174] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1417.368439] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fe2e4e2f-fe2e-4662-8fe8-f7b41db5f8fc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.376848] env[62510]: DEBUG nova.virt.hardware [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1417.377108] env[62510]: DEBUG nova.virt.hardware [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1417.377264] env[62510]: DEBUG nova.virt.hardware [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1417.377443] env[62510]: DEBUG nova.virt.hardware [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1417.377588] env[62510]: DEBUG nova.virt.hardware [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1417.377816] env[62510]: DEBUG nova.virt.hardware [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1417.380241] env[62510]: DEBUG nova.virt.hardware [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1417.380556] env[62510]: DEBUG nova.virt.hardware [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1417.380900] env[62510]: DEBUG nova.virt.hardware [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1417.381157] env[62510]: DEBUG nova.virt.hardware [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1417.381419] env[62510]: DEBUG nova.virt.hardware [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1417.383158] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4726b8e9-4433-4c77-b79a-a5c20c79c47b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.387414] env[62510]: DEBUG oslo_vmware.api [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Waiting for the task: (returnval){ [ 1417.387414] env[62510]: value = "task-1768248" [ 1417.387414] env[62510]: _type = "Task" [ 1417.387414] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.395239] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c033d12a-c71a-4a2a-bbc5-1471ba49ab91 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.403264] env[62510]: DEBUG oslo_concurrency.lockutils [None req-42699163-39ad-444c-adae-b2c783caa4e6 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Acquiring lock "585784c5-b56a-435d-8b22-53bc5cb39b25" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1417.403957] env[62510]: DEBUG oslo_concurrency.lockutils [None req-42699163-39ad-444c-adae-b2c783caa4e6 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Lock "585784c5-b56a-435d-8b22-53bc5cb39b25" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1417.403957] env[62510]: DEBUG oslo_concurrency.lockutils [None req-42699163-39ad-444c-adae-b2c783caa4e6 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Acquiring lock "585784c5-b56a-435d-8b22-53bc5cb39b25-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1417.403957] env[62510]: DEBUG oslo_concurrency.lockutils [None req-42699163-39ad-444c-adae-b2c783caa4e6 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Lock "585784c5-b56a-435d-8b22-53bc5cb39b25-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1417.404259] env[62510]: DEBUG oslo_concurrency.lockutils [None req-42699163-39ad-444c-adae-b2c783caa4e6 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Lock "585784c5-b56a-435d-8b22-53bc5cb39b25-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1417.406132] env[62510]: DEBUG oslo_vmware.api [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Task: {'id': task-1768248, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.406609] env[62510]: INFO nova.compute.manager [None req-42699163-39ad-444c-adae-b2c783caa4e6 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Terminating instance [ 1417.512663] env[62510]: DEBUG nova.network.neutron [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Updating instance_info_cache with network_info: [{"id": "210d5dee-24d1-4f38-b4b0-d1b78b6180ed", "address": "fa:16:3e:13:c0:2d", "network": {"id": "eb00b6bc-924a-4070-bdac-d34d8e726329", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1974390299-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c852b1f21b054fd0b6961685dcf528f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19671de9-8b5b-4710-adc3-7419f3c0f171", "external-id": "nsx-vlan-transportzone-421", "segmentation_id": 421, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap210d5dee-24", "ovs_interfaceid": "210d5dee-24d1-4f38-b4b0-d1b78b6180ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1417.523319] env[62510]: DEBUG oslo_vmware.api [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52232703-974c-c467-cf66-a3b1dfaf2641, 'name': SearchDatastore_Task, 'duration_secs': 0.057607} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.523319] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe734020-e61f-407b-a6e7-c11112609a15 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.527881] env[62510]: DEBUG oslo_vmware.api [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Waiting for the task: (returnval){ [ 1417.527881] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52d14d21-6032-d0b9-8303-d317865fb312" [ 1417.527881] env[62510]: _type = "Task" [ 1417.527881] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.540167] env[62510]: DEBUG oslo_vmware.api [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52d14d21-6032-d0b9-8303-d317865fb312, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.543843] env[62510]: DEBUG oslo_vmware.api [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768247, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.330177} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.543972] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1417.544185] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1417.544411] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1417.808019] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.504s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1417.808019] env[62510]: DEBUG nova.compute.manager [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1417.809103] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 10.308s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1417.845097] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768246, 'name': CreateVM_Task, 'duration_secs': 1.168118} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.845370] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1417.846657] env[62510]: DEBUG oslo_concurrency.lockutils [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1417.846830] env[62510]: DEBUG oslo_concurrency.lockutils [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1417.847217] env[62510]: DEBUG oslo_concurrency.lockutils [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1417.847494] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e258c3f-00ef-44ef-89d2-15a0ca35f6f8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.854177] env[62510]: DEBUG oslo_vmware.api [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1417.854177] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52ba9ae1-faef-4c8d-cc47-fb4ba49cd9c3" [ 1417.854177] env[62510]: _type = "Task" [ 1417.854177] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.865477] env[62510]: DEBUG oslo_vmware.api [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52ba9ae1-faef-4c8d-cc47-fb4ba49cd9c3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.879334] env[62510]: DEBUG nova.compute.manager [req-b9778fd8-9388-4667-84e8-c22d0ee23149 req-89a762dd-e019-488b-a384-33d7894b179c service nova] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Received event network-vif-deleted-55c9d2e9-7fb5-4e9d-8071-fe23efeedb7c {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1417.879334] env[62510]: DEBUG nova.compute.manager [req-b9778fd8-9388-4667-84e8-c22d0ee23149 req-89a762dd-e019-488b-a384-33d7894b179c service nova] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Received event network-vif-plugged-f4736e99-c658-4d4e-ace8-a3b4552f43bf {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1417.879509] env[62510]: DEBUG oslo_concurrency.lockutils [req-b9778fd8-9388-4667-84e8-c22d0ee23149 req-89a762dd-e019-488b-a384-33d7894b179c service nova] Acquiring lock "8ffa27e9-6a3b-48d1-aed4-c808089788d9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1417.879668] env[62510]: DEBUG oslo_concurrency.lockutils [req-b9778fd8-9388-4667-84e8-c22d0ee23149 req-89a762dd-e019-488b-a384-33d7894b179c service nova] Lock "8ffa27e9-6a3b-48d1-aed4-c808089788d9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1417.880152] env[62510]: DEBUG oslo_concurrency.lockutils [req-b9778fd8-9388-4667-84e8-c22d0ee23149 req-89a762dd-e019-488b-a384-33d7894b179c service nova] Lock "8ffa27e9-6a3b-48d1-aed4-c808089788d9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1417.880518] env[62510]: DEBUG nova.compute.manager [req-b9778fd8-9388-4667-84e8-c22d0ee23149 req-89a762dd-e019-488b-a384-33d7894b179c service nova] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] No waiting events found dispatching network-vif-plugged-f4736e99-c658-4d4e-ace8-a3b4552f43bf {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1417.880726] env[62510]: WARNING nova.compute.manager [req-b9778fd8-9388-4667-84e8-c22d0ee23149 req-89a762dd-e019-488b-a384-33d7894b179c service nova] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Received unexpected event network-vif-plugged-f4736e99-c658-4d4e-ace8-a3b4552f43bf for instance with vm_state building and task_state spawning. [ 1417.880893] env[62510]: DEBUG nova.compute.manager [req-b9778fd8-9388-4667-84e8-c22d0ee23149 req-89a762dd-e019-488b-a384-33d7894b179c service nova] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Received event network-changed-f4736e99-c658-4d4e-ace8-a3b4552f43bf {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1417.881159] env[62510]: DEBUG nova.compute.manager [req-b9778fd8-9388-4667-84e8-c22d0ee23149 req-89a762dd-e019-488b-a384-33d7894b179c service nova] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Refreshing instance network info cache due to event network-changed-f4736e99-c658-4d4e-ace8-a3b4552f43bf. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1417.881369] env[62510]: DEBUG oslo_concurrency.lockutils [req-b9778fd8-9388-4667-84e8-c22d0ee23149 req-89a762dd-e019-488b-a384-33d7894b179c service nova] Acquiring lock "refresh_cache-8ffa27e9-6a3b-48d1-aed4-c808089788d9" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1417.881506] env[62510]: DEBUG oslo_concurrency.lockutils [req-b9778fd8-9388-4667-84e8-c22d0ee23149 req-89a762dd-e019-488b-a384-33d7894b179c service nova] Acquired lock "refresh_cache-8ffa27e9-6a3b-48d1-aed4-c808089788d9" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1417.881886] env[62510]: DEBUG nova.network.neutron [req-b9778fd8-9388-4667-84e8-c22d0ee23149 req-89a762dd-e019-488b-a384-33d7894b179c service nova] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Refreshing network info cache for port f4736e99-c658-4d4e-ace8-a3b4552f43bf {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1417.905708] env[62510]: DEBUG oslo_vmware.api [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Task: {'id': task-1768248, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081377} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.905835] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1417.908878] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6e62000-cd82-49bb-ae2d-2e5eab651103 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.923846] env[62510]: DEBUG nova.compute.manager [None req-42699163-39ad-444c-adae-b2c783caa4e6 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1417.924090] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-42699163-39ad-444c-adae-b2c783caa4e6 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1417.934550] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Reconfiguring VM instance instance-00000009 to attach disk [datastore1] 612e95d6-28ef-4c9a-b5d9-fd83122bfa44/612e95d6-28ef-4c9a-b5d9-fd83122bfa44.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1417.935410] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a4efd91-b3d1-46c9-a31e-8024f5c12157 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.938177] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-53a2eb71-2e27-4d1d-a91d-44dd9feca2da {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.959235] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-42699163-39ad-444c-adae-b2c783caa4e6 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1417.960584] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-af1ecc1b-e4ca-46ef-b341-fe0c7e87e76f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.962249] env[62510]: DEBUG oslo_vmware.api [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Waiting for the task: (returnval){ [ 1417.962249] env[62510]: value = "task-1768249" [ 1417.962249] env[62510]: _type = "Task" [ 1417.962249] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.966767] env[62510]: DEBUG oslo_vmware.api [None req-42699163-39ad-444c-adae-b2c783caa4e6 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Waiting for the task: (returnval){ [ 1417.966767] env[62510]: value = "task-1768250" [ 1417.966767] env[62510]: _type = "Task" [ 1417.966767] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.974131] env[62510]: DEBUG oslo_vmware.api [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Task: {'id': task-1768249, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.979017] env[62510]: DEBUG oslo_vmware.api [None req-42699163-39ad-444c-adae-b2c783caa4e6 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Task: {'id': task-1768250, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.015349] env[62510]: DEBUG oslo_concurrency.lockutils [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Releasing lock "refresh_cache-35a98028-0fc6-4e13-b50d-5dacf205dbe5" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1418.015774] env[62510]: DEBUG nova.compute.manager [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Instance network_info: |[{"id": "210d5dee-24d1-4f38-b4b0-d1b78b6180ed", "address": "fa:16:3e:13:c0:2d", "network": {"id": "eb00b6bc-924a-4070-bdac-d34d8e726329", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1974390299-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c852b1f21b054fd0b6961685dcf528f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19671de9-8b5b-4710-adc3-7419f3c0f171", "external-id": "nsx-vlan-transportzone-421", "segmentation_id": 421, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap210d5dee-24", "ovs_interfaceid": "210d5dee-24d1-4f38-b4b0-d1b78b6180ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1418.016286] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:13:c0:2d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '19671de9-8b5b-4710-adc3-7419f3c0f171', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '210d5dee-24d1-4f38-b4b0-d1b78b6180ed', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1418.025734] env[62510]: DEBUG oslo.service.loopingcall [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1418.026277] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1418.026591] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-02bdff07-9620-4a2a-82f8-2f15ccd68840 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.059266] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1418.059266] env[62510]: value = "task-1768251" [ 1418.059266] env[62510]: _type = "Task" [ 1418.059266] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.059266] env[62510]: DEBUG oslo_vmware.api [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52d14d21-6032-d0b9-8303-d317865fb312, 'name': SearchDatastore_Task, 'duration_secs': 0.012061} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.059266] env[62510]: DEBUG oslo_concurrency.lockutils [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1418.059266] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 75e06a24-b96c-4a42-bc2d-b0b960e3301a/75e06a24-b96c-4a42-bc2d-b0b960e3301a.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1418.060827] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-671a867e-a4be-4c91-a277-fcf5101423bf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.070976] env[62510]: DEBUG oslo_vmware.api [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Waiting for the task: (returnval){ [ 1418.070976] env[62510]: value = "task-1768252" [ 1418.070976] env[62510]: _type = "Task" [ 1418.070976] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.079121] env[62510]: DEBUG oslo_vmware.api [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768252, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.118641] env[62510]: DEBUG nova.network.neutron [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Successfully updated port: a6e31bab-0459-42fe-8756-d37cc3fa3e88 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1418.235336] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Acquiring lock "0a940fd0-73cc-403d-9afc-a989c67dfdef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1418.235336] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Lock "0a940fd0-73cc-403d-9afc-a989c67dfdef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1418.326146] env[62510]: DEBUG nova.compute.utils [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1418.327687] env[62510]: DEBUG nova.compute.manager [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1418.327886] env[62510]: DEBUG nova.network.neutron [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1418.372130] env[62510]: DEBUG oslo_vmware.api [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52ba9ae1-faef-4c8d-cc47-fb4ba49cd9c3, 'name': SearchDatastore_Task, 'duration_secs': 0.029007} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.372502] env[62510]: DEBUG oslo_concurrency.lockutils [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1418.372703] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1418.372940] env[62510]: DEBUG oslo_concurrency.lockutils [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1418.373093] env[62510]: DEBUG oslo_concurrency.lockutils [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1418.373276] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1418.373714] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ed60fd29-8a04-4080-92df-015a8ab81011 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.389516] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1418.389707] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1418.390524] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d5126f6-7e3f-4495-9d36-702bc1ef5c0d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.397785] env[62510]: DEBUG oslo_vmware.api [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1418.397785] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]528462dd-bc1e-4885-0818-10323aba9032" [ 1418.397785] env[62510]: _type = "Task" [ 1418.397785] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.407663] env[62510]: DEBUG oslo_vmware.api [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]528462dd-bc1e-4885-0818-10323aba9032, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.474902] env[62510]: DEBUG oslo_vmware.api [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Task: {'id': task-1768249, 'name': ReconfigVM_Task, 'duration_secs': 0.413938} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.475702] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Reconfigured VM instance instance-00000009 to attach disk [datastore1] 612e95d6-28ef-4c9a-b5d9-fd83122bfa44/612e95d6-28ef-4c9a-b5d9-fd83122bfa44.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1418.476308] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3e9bf4f3-ed9a-463f-9fa9-8b8b64800350 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.481390] env[62510]: DEBUG oslo_vmware.api [None req-42699163-39ad-444c-adae-b2c783caa4e6 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Task: {'id': task-1768250, 'name': PowerOffVM_Task, 'duration_secs': 0.238071} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.482107] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-42699163-39ad-444c-adae-b2c783caa4e6 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1418.482878] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-42699163-39ad-444c-adae-b2c783caa4e6 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1418.482878] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e2ec6607-3362-49da-91d0-e99b952c6506 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.487047] env[62510]: DEBUG oslo_vmware.api [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Waiting for the task: (returnval){ [ 1418.487047] env[62510]: value = "task-1768253" [ 1418.487047] env[62510]: _type = "Task" [ 1418.487047] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.493920] env[62510]: DEBUG nova.policy [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c5050179d59b45d7a3e63c7d090e9181', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1b2875fef23d486a900e5909a704c64b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1418.499272] env[62510]: DEBUG oslo_vmware.api [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Task: {'id': task-1768253, 'name': Rename_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.530920] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Acquiring lock "b7c2c768-573b-4c1c-ade7-45fb87b95d41" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1418.531702] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Lock "b7c2c768-573b-4c1c-ade7-45fb87b95d41" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1418.568669] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-42699163-39ad-444c-adae-b2c783caa4e6 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1418.568972] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-42699163-39ad-444c-adae-b2c783caa4e6 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1418.569111] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-42699163-39ad-444c-adae-b2c783caa4e6 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Deleting the datastore file [datastore1] 585784c5-b56a-435d-8b22-53bc5cb39b25 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1418.569399] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e9d4102b-2d63-45d2-af15-4cf927533b8b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.584516] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768251, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.589070] env[62510]: DEBUG oslo_vmware.api [None req-42699163-39ad-444c-adae-b2c783caa4e6 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Waiting for the task: (returnval){ [ 1418.589070] env[62510]: value = "task-1768255" [ 1418.589070] env[62510]: _type = "Task" [ 1418.589070] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.598201] env[62510]: DEBUG oslo_vmware.api [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768252, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.603421] env[62510]: DEBUG oslo_vmware.api [None req-42699163-39ad-444c-adae-b2c783caa4e6 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Task: {'id': task-1768255, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.615404] env[62510]: DEBUG nova.virt.hardware [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1418.615632] env[62510]: DEBUG nova.virt.hardware [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1418.615895] env[62510]: DEBUG nova.virt.hardware [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1418.616010] env[62510]: DEBUG nova.virt.hardware [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1418.618017] env[62510]: DEBUG nova.virt.hardware [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1418.618017] env[62510]: DEBUG nova.virt.hardware [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1418.618017] env[62510]: DEBUG nova.virt.hardware [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1418.618017] env[62510]: DEBUG nova.virt.hardware [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1418.618017] env[62510]: DEBUG nova.virt.hardware [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1418.618017] env[62510]: DEBUG nova.virt.hardware [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1418.618017] env[62510]: DEBUG nova.virt.hardware [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1418.618338] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31fb503a-565d-4761-85ad-3f10a89239df {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.624158] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "refresh_cache-4e735bb6-f167-4c2b-b44e-d2dd3040603d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1418.624301] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquired lock "refresh_cache-4e735bb6-f167-4c2b-b44e-d2dd3040603d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1418.624478] env[62510]: DEBUG nova.network.neutron [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1418.630636] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34d8184e-db7a-4746-9214-cff3363ac7f8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.647405] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:eb:1f:d2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '87bbf4e0-9064-4516-b7e7-44973f817205', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': 'd87267d7-ec03-4d4a-a31a-9cb46a459d3c', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1418.655189] env[62510]: DEBUG oslo.service.loopingcall [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1418.655873] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1418.656357] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b606af7e-fa92-4d4e-8145-701a6295927c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.677718] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1418.677718] env[62510]: value = "task-1768256" [ 1418.677718] env[62510]: _type = "Task" [ 1418.677718] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.687238] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768256, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.829986] env[62510]: INFO nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Updating resource usage from migration a3d89ea5-941d-4795-af70-8061e49c8be5 [ 1418.832672] env[62510]: DEBUG nova.compute.manager [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1418.910514] env[62510]: DEBUG oslo_vmware.api [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]528462dd-bc1e-4885-0818-10323aba9032, 'name': SearchDatastore_Task, 'duration_secs': 0.074707} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.911293] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1eede93e-58df-4508-8c71-7db81ca302de {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.916304] env[62510]: DEBUG oslo_vmware.api [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1418.916304] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]527cc199-06d8-698e-fed4-dd4d3eded681" [ 1418.916304] env[62510]: _type = "Task" [ 1418.916304] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.923846] env[62510]: DEBUG oslo_vmware.api [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]527cc199-06d8-698e-fed4-dd4d3eded681, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.935274] env[62510]: DEBUG nova.network.neutron [req-b9778fd8-9388-4667-84e8-c22d0ee23149 req-89a762dd-e019-488b-a384-33d7894b179c service nova] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Updated VIF entry in instance network info cache for port f4736e99-c658-4d4e-ace8-a3b4552f43bf. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1418.935600] env[62510]: DEBUG nova.network.neutron [req-b9778fd8-9388-4667-84e8-c22d0ee23149 req-89a762dd-e019-488b-a384-33d7894b179c service nova] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Updating instance_info_cache with network_info: [{"id": "f4736e99-c658-4d4e-ace8-a3b4552f43bf", "address": "fa:16:3e:11:18:84", "network": {"id": "22bd7136-e6e5-445f-8cd0-6cfe0341410c", "bridge": "br-int", "label": "tempest-ServersTestJSON-2034430291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "94a46473611d4b22be7c66c909d1b348", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4736e99-c6", "ovs_interfaceid": "f4736e99-c658-4d4e-ace8-a3b4552f43bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1418.999785] env[62510]: DEBUG oslo_vmware.api [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Task: {'id': task-1768253, 'name': Rename_Task, 'duration_secs': 0.273233} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.000134] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1419.000415] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-73f8148f-a11f-42d6-aa09-747a336719ba {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.008871] env[62510]: DEBUG oslo_vmware.api [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Waiting for the task: (returnval){ [ 1419.008871] env[62510]: value = "task-1768257" [ 1419.008871] env[62510]: _type = "Task" [ 1419.008871] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.017965] env[62510]: DEBUG oslo_vmware.api [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Task: {'id': task-1768257, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.070451] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768251, 'name': CreateVM_Task, 'duration_secs': 0.679213} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.070706] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1419.071313] env[62510]: DEBUG oslo_concurrency.lockutils [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1419.071469] env[62510]: DEBUG oslo_concurrency.lockutils [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1419.071890] env[62510]: DEBUG oslo_concurrency.lockutils [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1419.072168] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1c6cdab-a30a-4c2b-814a-f45c723caf42 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.083021] 
env[62510]: DEBUG oslo_vmware.api [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768252, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.64021} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.084063] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 75e06a24-b96c-4a42-bc2d-b0b960e3301a/75e06a24-b96c-4a42-bc2d-b0b960e3301a.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1419.084499] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1419.084831] env[62510]: DEBUG oslo_vmware.api [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Waiting for the task: (returnval){ [ 1419.084831] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52e14130-adae-ce78-332a-e61870fe3918" [ 1419.084831] env[62510]: _type = "Task" [ 1419.084831] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.085229] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e36c6463-6c06-4899-a1af-8d9e5afa49eb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.096989] env[62510]: DEBUG oslo_vmware.api [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Waiting for the task: (returnval){ [ 1419.096989] env[62510]: value = "task-1768258" [ 1419.096989] env[62510]: _type = "Task" [ 1419.096989] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.100887] env[62510]: DEBUG oslo_vmware.api [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52e14130-adae-ce78-332a-e61870fe3918, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.107636] env[62510]: DEBUG oslo_vmware.api [None req-42699163-39ad-444c-adae-b2c783caa4e6 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Task: {'id': task-1768255, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.112396] env[62510]: DEBUG oslo_vmware.api [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768258, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.187694] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768256, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.249151] env[62510]: DEBUG nova.network.neutron [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1419.361702] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance b7c2c768-573b-4c1c-ade7-45fb87b95d41 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1419.433896] env[62510]: DEBUG oslo_vmware.api [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]527cc199-06d8-698e-fed4-dd4d3eded681, 'name': SearchDatastore_Task, 'duration_secs': 0.04291} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.437018] env[62510]: DEBUG oslo_concurrency.lockutils [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1419.437018] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 8ffa27e9-6a3b-48d1-aed4-c808089788d9/8ffa27e9-6a3b-48d1-aed4-c808089788d9.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1419.437018] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f8fffc10-8661-4942-8a46-166878eda021 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.438406] env[62510]: DEBUG oslo_concurrency.lockutils [req-b9778fd8-9388-4667-84e8-c22d0ee23149 req-89a762dd-e019-488b-a384-33d7894b179c service nova] Releasing lock "refresh_cache-8ffa27e9-6a3b-48d1-aed4-c808089788d9" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1419.446175] env[62510]: DEBUG oslo_vmware.api [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1419.446175] env[62510]: value = "task-1768259" [ 1419.446175] env[62510]: _type = "Task" [ 1419.446175] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.455913] env[62510]: DEBUG oslo_vmware.api [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768259, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.523921] env[62510]: DEBUG oslo_vmware.api [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Task: {'id': task-1768257, 'name': PowerOnVM_Task, 'duration_secs': 0.493716} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.523921] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1419.523921] env[62510]: INFO nova.compute.manager [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Took 12.57 seconds to spawn the instance on the hypervisor. 
[ 1419.523921] env[62510]: DEBUG nova.compute.manager [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1419.523921] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96785fdf-6f9f-4304-b831-027253a7288e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.610577] env[62510]: DEBUG oslo_vmware.api [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52e14130-adae-ce78-332a-e61870fe3918, 'name': SearchDatastore_Task, 'duration_secs': 0.022806} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.610856] env[62510]: DEBUG oslo_vmware.api [None req-42699163-39ad-444c-adae-b2c783caa4e6 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Task: {'id': task-1768255, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.595514} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.615929] env[62510]: DEBUG oslo_concurrency.lockutils [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1419.615929] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1419.615929] env[62510]: DEBUG oslo_concurrency.lockutils [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1419.615929] env[62510]: DEBUG oslo_concurrency.lockutils [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1419.616165] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1419.616314] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-42699163-39ad-444c-adae-b2c783caa4e6 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1419.616478] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-42699163-39ad-444c-adae-b2c783caa4e6 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1419.616710] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-42699163-39ad-444c-adae-b2c783caa4e6 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1419.616861] env[62510]: INFO nova.compute.manager [None req-42699163-39ad-444c-adae-b2c783caa4e6 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Took 1.69 seconds to destroy the instance on the hypervisor. [ 1419.617298] env[62510]: DEBUG oslo.service.loopingcall [None req-42699163-39ad-444c-adae-b2c783caa4e6 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1419.618351] env[62510]: DEBUG nova.network.neutron [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Updating instance_info_cache with network_info: [{"id": "a6e31bab-0459-42fe-8756-d37cc3fa3e88", "address": "fa:16:3e:7d:cb:3f", "network": {"id": "9b209a99-520e-436f-be97-fe37ae505518", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1482163995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86abf24d608d4c438161dc0b8335dea1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6e31bab-04", "ovs_interfaceid": "a6e31bab-0459-42fe-8756-d37cc3fa3e88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1419.619729] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-e6098d42-2738-4185-b42c-0a447103a0c9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.621591] env[62510]: DEBUG nova.compute.manager [-] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1419.621684] env[62510]: DEBUG nova.network.neutron [-] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1419.624054] env[62510]: DEBUG nova.network.neutron [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Successfully created port: dba71e12-5d92-43ea-ba7a-3e8b1fc8d6cb {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1419.634581] env[62510]: DEBUG oslo_vmware.api [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768258, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084556} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.634581] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1419.634794] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be2eeda0-df80-470e-90cf-92ba35df0413 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.639993] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1419.639993] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1419.641750] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab2fc6a0-57c6-4bf7-8a80-4176c7254f7c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.662515] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Reconfiguring VM instance instance-0000000a to attach disk [datastore1] 75e06a24-b96c-4a42-bc2d-b0b960e3301a/75e06a24-b96c-4a42-bc2d-b0b960e3301a.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1419.663742] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-857edc5f-1654-42de-852d-6d094c81b531 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.679792] env[62510]: DEBUG oslo_vmware.api [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Waiting for the task: (returnval){ [ 1419.679792] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52752eb6-f34e-5ce1-be7c-1bf3657809f7" [ 1419.679792] env[62510]: _type = "Task" [ 1419.679792] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.688282] env[62510]: DEBUG oslo_vmware.api [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Waiting for the task: (returnval){ [ 1419.688282] env[62510]: value = "task-1768260" [ 1419.688282] env[62510]: _type = "Task" [ 1419.688282] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.697177] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768256, 'name': CreateVM_Task, 'duration_secs': 0.689615} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.697737] env[62510]: DEBUG oslo_vmware.api [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52752eb6-f34e-5ce1-be7c-1bf3657809f7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.698560] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1419.698893] env[62510]: DEBUG oslo_concurrency.lockutils [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1419.699065] env[62510]: DEBUG oslo_concurrency.lockutils [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1419.699369] env[62510]: DEBUG oslo_concurrency.lockutils [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1419.699607] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15a0e693-43cf-4e34-a140-84c9f048bbc7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.704127] env[62510]: DEBUG oslo_vmware.api [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768260, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.710029] env[62510]: DEBUG oslo_vmware.api [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1419.710029] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5222fc53-589a-c10a-dca2-30fa070c1aed" [ 1419.710029] env[62510]: _type = "Task" [ 1419.710029] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.715270] env[62510]: DEBUG oslo_vmware.api [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5222fc53-589a-c10a-dca2-30fa070c1aed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.842909] env[62510]: DEBUG nova.compute.manager [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1419.866596] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 8bbafd7f-cdd1-4246-a509-2f97a6f78497 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1419.866762] env[62510]: WARNING nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 0604d37b-38c5-4510-894e-b26fd44e17c5 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1419.866933] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 34a464e2-d38e-4c24-a487-c62a4f484667 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1419.867111] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 585784c5-b56a-435d-8b22-53bc5cb39b25 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1419.867315] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 731e7110-9709-4c4e-96d2-00e21e67c6e3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1419.867480] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 612e95d6-28ef-4c9a-b5d9-fd83122bfa44 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1419.868038] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 4e735bb6-f167-4c2b-b44e-d2dd3040603d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1419.868038] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 8ffa27e9-6a3b-48d1-aed4-c808089788d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1419.894063] env[62510]: DEBUG nova.virt.hardware [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1419.894398] env[62510]: DEBUG nova.virt.hardware [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1419.894549] env[62510]: DEBUG nova.virt.hardware [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1419.894668] env[62510]: DEBUG nova.virt.hardware [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1419.894881] env[62510]: DEBUG nova.virt.hardware [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1419.896068] env[62510]: DEBUG nova.virt.hardware [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1419.896430] env[62510]: DEBUG nova.virt.hardware [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1419.896539] env[62510]: DEBUG nova.virt.hardware [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1419.896777] env[62510]: DEBUG nova.virt.hardware [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1419.897615] env[62510]: DEBUG nova.virt.hardware [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1419.897764] env[62510]: DEBUG nova.virt.hardware [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1419.898778] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f8f84eb-5066-4db9-9685-16f81ca9d8f4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.921810] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a94621e0-af9d-4b2a-8af4-8291195378a0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.934496] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "0029d975-bd48-4558-9f41-a0cf91336393" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1419.934945] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "0029d975-bd48-4558-9f41-a0cf91336393" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1419.970797] env[62510]: DEBUG oslo_vmware.api [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768259, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.047206] env[62510]: INFO nova.compute.manager [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Took 30.50 seconds to build instance. 
[ 1420.124460] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Releasing lock "refresh_cache-4e735bb6-f167-4c2b-b44e-d2dd3040603d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1420.124583] env[62510]: DEBUG nova.compute.manager [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Instance network_info: |[{"id": "a6e31bab-0459-42fe-8756-d37cc3fa3e88", "address": "fa:16:3e:7d:cb:3f", "network": {"id": "9b209a99-520e-436f-be97-fe37ae505518", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1482163995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86abf24d608d4c438161dc0b8335dea1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6e31bab-04", "ovs_interfaceid": "a6e31bab-0459-42fe-8756-d37cc3fa3e88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1420.125108] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7d:cb:3f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9630cae2-7dd9-42b7-8b53-91ab254af243', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a6e31bab-0459-42fe-8756-d37cc3fa3e88', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1420.132862] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Creating folder: Project (86abf24d608d4c438161dc0b8335dea1). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1420.133551] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-588c3e5d-7cb5-44bc-87d3-62747eb69cec {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.146740] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Created folder: Project (86abf24d608d4c438161dc0b8335dea1) in parent group-v367197. 
[ 1420.147016] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Creating folder: Instances. Parent ref: group-v367231. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1420.147297] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2653df4b-e7c3-41da-b841-4504ce847f4c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.160636] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Created folder: Instances in parent group-v367231. [ 1420.160952] env[62510]: DEBUG oslo.service.loopingcall [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1420.162756] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1420.162756] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4b3b826f-fe6e-4cd8-a6d1-49bf68ca6c6a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.184422] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1420.184422] env[62510]: value = "task-1768263" [ 1420.184422] env[62510]: _type = "Task" [ 1420.184422] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.200770] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768263, 'name': CreateVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.208264] env[62510]: DEBUG oslo_vmware.api [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52752eb6-f34e-5ce1-be7c-1bf3657809f7, 'name': SearchDatastore_Task, 'duration_secs': 0.043932} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.208264] env[62510]: DEBUG oslo_vmware.api [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768260, 'name': ReconfigVM_Task, 'duration_secs': 0.372683} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.214424] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Reconfigured VM instance instance-0000000a to attach disk [datastore1] 75e06a24-b96c-4a42-bc2d-b0b960e3301a/75e06a24-b96c-4a42-bc2d-b0b960e3301a.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1420.214424] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72d4c263-06ea-4a73-9504-9fa5dde5f4e5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.215305] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1387aa94-e49d-4909-9664-0bf5a545b15b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.226222] env[62510]: DEBUG oslo_vmware.api [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Waiting for the task: (returnval){ [ 1420.226222] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52219f94-d241-2a20-24d3-6a7fd7f9e026" [ 1420.226222] env[62510]: _type = "Task" [ 1420.226222] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.226529] env[62510]: DEBUG oslo_vmware.api [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5222fc53-589a-c10a-dca2-30fa070c1aed, 'name': SearchDatastore_Task, 'duration_secs': 0.021878} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.227867] env[62510]: DEBUG oslo_concurrency.lockutils [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1420.228150] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1420.228390] env[62510]: DEBUG oslo_concurrency.lockutils [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1420.229070] env[62510]: DEBUG oslo_vmware.api [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Waiting for the task: (returnval){ [ 1420.229070] env[62510]: value = "task-1768264" [ 1420.229070] env[62510]: _type = "Task" [ 1420.229070] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.241312] env[62510]: DEBUG oslo_vmware.api [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52219f94-d241-2a20-24d3-6a7fd7f9e026, 'name': SearchDatastore_Task, 'duration_secs': 0.011785} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.245163] env[62510]: DEBUG oslo_concurrency.lockutils [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1420.247578] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 35a98028-0fc6-4e13-b50d-5dacf205dbe5/35a98028-0fc6-4e13-b50d-5dacf205dbe5.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1420.247578] env[62510]: DEBUG oslo_vmware.api [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768264, 'name': Rename_Task} progress is 6%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.247578] env[62510]: DEBUG oslo_concurrency.lockutils [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1420.247578] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1420.247578] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1b30c072-28f9-433b-9736-8845958a15fa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.250793] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-02cc72ed-32bb-4308-bb3b-33e794246c80 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.260718] env[62510]: DEBUG oslo_vmware.api [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Waiting for the task: (returnval){ [ 1420.260718] env[62510]: value = "task-1768265" [ 1420.260718] env[62510]: _type = "Task" [ 1420.260718] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.261052] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1420.261097] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1420.265210] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8132085e-334d-4580-94b3-eb3f6b345919 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.273413] env[62510]: DEBUG oslo_vmware.api [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768265, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.274704] env[62510]: DEBUG oslo_vmware.api [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1420.274704] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5227f76c-b6df-4070-bbf0-8edac9581bf1" [ 1420.274704] env[62510]: _type = "Task" [ 1420.274704] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.283179] env[62510]: DEBUG oslo_vmware.api [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5227f76c-b6df-4070-bbf0-8edac9581bf1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.375174] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 26b283b0-98b4-4a15-abe0-fbf97e1f49eb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1420.471304] env[62510]: DEBUG oslo_vmware.api [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768259, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.782067} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.471304] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 8ffa27e9-6a3b-48d1-aed4-c808089788d9/8ffa27e9-6a3b-48d1-aed4-c808089788d9.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1420.471304] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1420.471659] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a3241177-a2bc-4f30-9e76-144b5d27c119 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.480040] env[62510]: DEBUG oslo_vmware.api [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1420.480040] env[62510]: value = "task-1768266" [ 1420.480040] env[62510]: _type = "Task" [ 1420.480040] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.498444] env[62510]: DEBUG oslo_vmware.api [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768266, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.550402] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1dddf0da-c924-40e8-adc0-530796f0a991 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Lock "612e95d6-28ef-4c9a-b5d9-fd83122bfa44" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.009s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1420.559804] env[62510]: DEBUG nova.network.neutron [-] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1420.699072] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768263, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.744123] env[62510]: DEBUG oslo_vmware.api [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768264, 'name': Rename_Task, 'duration_secs': 0.172097} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.744491] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1420.744809] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-213fd12c-6a46-4f95-835e-a3a32fd63fb0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.753597] env[62510]: DEBUG oslo_vmware.api [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Waiting for the task: (returnval){ [ 1420.753597] env[62510]: value = "task-1768267" [ 1420.753597] env[62510]: _type = "Task" [ 1420.753597] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.763553] env[62510]: DEBUG oslo_vmware.api [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768267, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.773168] env[62510]: DEBUG oslo_vmware.api [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768265, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.784230] env[62510]: DEBUG oslo_vmware.api [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5227f76c-b6df-4070-bbf0-8edac9581bf1, 'name': SearchDatastore_Task, 'duration_secs': 0.011128} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.785620] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0907c2f-7632-4df8-953c-ebb5e37a2d53 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.790649] env[62510]: DEBUG oslo_vmware.api [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1420.790649] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5264ba96-0fff-a6bb-4def-cbcba18ee26c" [ 1420.790649] env[62510]: _type = "Task" [ 1420.790649] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.799437] env[62510]: DEBUG oslo_vmware.api [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5264ba96-0fff-a6bb-4def-cbcba18ee26c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.878335] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 9a1a0428-8ccd-4614-8853-ef3eeec23d55 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1420.991407] env[62510]: DEBUG oslo_vmware.api [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768266, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.170334} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.991796] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1420.992679] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06809a63-8202-46d2-b2a0-f23f0a59177b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.016515] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Reconfiguring VM instance instance-0000000b to attach disk [datastore1] 8ffa27e9-6a3b-48d1-aed4-c808089788d9/8ffa27e9-6a3b-48d1-aed4-c808089788d9.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1421.016733] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-39ddd287-0ace-4f3e-b61a-105ede751586 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.040055] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Acquiring lock "4f9bfb02-8aea-45a9-85ea-97e70f0d41fb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1421.040055] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Lock "4f9bfb02-8aea-45a9-85ea-97e70f0d41fb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1421.040055] env[62510]: DEBUG oslo_vmware.api [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1421.040055] env[62510]: value = "task-1768268" [ 1421.040055] env[62510]: _type = "Task" [ 1421.040055] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.048426] env[62510]: DEBUG oslo_vmware.api [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768268, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.056689] env[62510]: DEBUG nova.compute.manager [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1421.062477] env[62510]: INFO nova.compute.manager [-] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Took 1.44 seconds to deallocate network for instance. [ 1421.142993] env[62510]: DEBUG nova.compute.manager [req-429e9dc1-e7ae-4db0-911c-d7139c098c74 req-4fb3eb18-d348-4b17-a5dc-8b4acb888634 service nova] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Received event network-vif-plugged-210d5dee-24d1-4f38-b4b0-d1b78b6180ed {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1421.143152] env[62510]: DEBUG oslo_concurrency.lockutils [req-429e9dc1-e7ae-4db0-911c-d7139c098c74 req-4fb3eb18-d348-4b17-a5dc-8b4acb888634 service nova] Acquiring lock "35a98028-0fc6-4e13-b50d-5dacf205dbe5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1421.143410] env[62510]: DEBUG oslo_concurrency.lockutils [req-429e9dc1-e7ae-4db0-911c-d7139c098c74 req-4fb3eb18-d348-4b17-a5dc-8b4acb888634 service nova] Lock "35a98028-0fc6-4e13-b50d-5dacf205dbe5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1421.143624] env[62510]: DEBUG oslo_concurrency.lockutils [req-429e9dc1-e7ae-4db0-911c-d7139c098c74 req-4fb3eb18-d348-4b17-a5dc-8b4acb888634 service nova] Lock "35a98028-0fc6-4e13-b50d-5dacf205dbe5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1421.143923] env[62510]: DEBUG nova.compute.manager [req-429e9dc1-e7ae-4db0-911c-d7139c098c74 req-4fb3eb18-d348-4b17-a5dc-8b4acb888634 service nova] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] No waiting events found dispatching network-vif-plugged-210d5dee-24d1-4f38-b4b0-d1b78b6180ed {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1421.144161] env[62510]: WARNING nova.compute.manager [req-429e9dc1-e7ae-4db0-911c-d7139c098c74 req-4fb3eb18-d348-4b17-a5dc-8b4acb888634 service nova] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Received unexpected event network-vif-plugged-210d5dee-24d1-4f38-b4b0-d1b78b6180ed for instance with vm_state building and task_state spawning. [ 1421.144368] env[62510]: DEBUG nova.compute.manager [req-429e9dc1-e7ae-4db0-911c-d7139c098c74 req-4fb3eb18-d348-4b17-a5dc-8b4acb888634 service nova] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Received event network-changed-210d5dee-24d1-4f38-b4b0-d1b78b6180ed {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1421.144621] env[62510]: DEBUG nova.compute.manager [req-429e9dc1-e7ae-4db0-911c-d7139c098c74 req-4fb3eb18-d348-4b17-a5dc-8b4acb888634 service nova] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Refreshing instance network info cache due to event network-changed-210d5dee-24d1-4f38-b4b0-d1b78b6180ed. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1421.144921] env[62510]: DEBUG oslo_concurrency.lockutils [req-429e9dc1-e7ae-4db0-911c-d7139c098c74 req-4fb3eb18-d348-4b17-a5dc-8b4acb888634 service nova] Acquiring lock "refresh_cache-35a98028-0fc6-4e13-b50d-5dacf205dbe5" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1421.145179] env[62510]: DEBUG oslo_concurrency.lockutils [req-429e9dc1-e7ae-4db0-911c-d7139c098c74 req-4fb3eb18-d348-4b17-a5dc-8b4acb888634 service nova] Acquired lock "refresh_cache-35a98028-0fc6-4e13-b50d-5dacf205dbe5" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1421.145477] env[62510]: DEBUG nova.network.neutron [req-429e9dc1-e7ae-4db0-911c-d7139c098c74 req-4fb3eb18-d348-4b17-a5dc-8b4acb888634 service nova] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Refreshing network info cache for port 210d5dee-24d1-4f38-b4b0-d1b78b6180ed {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1421.197237] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768263, 'name': CreateVM_Task, 'duration_secs': 0.519127} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.197453] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1421.198199] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1421.198412] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1421.198791] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1421.199113] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-883fbb88-b25e-4c13-a164-0d47f15eb085 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.204221] env[62510]: DEBUG oslo_vmware.api [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1421.204221] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52a624e8-d447-12b7-9524-b33ee44a506b" [ 1421.204221] env[62510]: _type = "Task" [ 1421.204221] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.218208] env[62510]: DEBUG oslo_vmware.api [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52a624e8-d447-12b7-9524-b33ee44a506b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.266526] env[62510]: DEBUG oslo_vmware.api [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768267, 'name': PowerOnVM_Task} progress is 76%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.275165] env[62510]: DEBUG oslo_vmware.api [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768265, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.58348} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.275382] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 35a98028-0fc6-4e13-b50d-5dacf205dbe5/35a98028-0fc6-4e13-b50d-5dacf205dbe5.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1421.275676] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1421.275879] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a45391af-1203-4594-a981-de13df0d679f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.282489] env[62510]: DEBUG oslo_vmware.api [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Waiting for the task: (returnval){ [ 1421.282489] env[62510]: value = "task-1768269" [ 1421.282489] env[62510]: _type = "Task" [ 1421.282489] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.291667] env[62510]: DEBUG oslo_vmware.api [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768269, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.302621] env[62510]: DEBUG oslo_vmware.api [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5264ba96-0fff-a6bb-4def-cbcba18ee26c, 'name': SearchDatastore_Task, 'duration_secs': 0.040287} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.303162] env[62510]: DEBUG oslo_concurrency.lockutils [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1421.303253] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 5588650b-c450-489a-a456-3b580a5b9114/5588650b-c450-489a-a456-3b580a5b9114.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1421.303550] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-18fe013b-eda8-4edf-b1f5-1e8baf912f6f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.312112] env[62510]: DEBUG oslo_vmware.api [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1421.312112] env[62510]: value = "task-1768270" [ 1421.312112] env[62510]: _type = "Task" [ 1421.312112] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.320783] env[62510]: DEBUG oslo_vmware.api [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768270, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.381960] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance c58184e7-bf4f-406b-a778-9b8f60740fe6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1421.382181] env[62510]: WARNING nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 3266d254-4a75-4fd3-b4e7-ebeb86467cbe is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
Skipping heal of allocation because we do not know what to do. [ 1421.451649] env[62510]: DEBUG nova.compute.manager [None req-7fe8b7a8-2dcb-4417-ab7d-b757eb60d9ee tempest-ServerDiagnosticsV248Test-363593752 tempest-ServerDiagnosticsV248Test-363593752-project-admin] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1421.453273] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07b39b84-fa2d-4965-8c26-7d2c73cb69c6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.460778] env[62510]: INFO nova.compute.manager [None req-7fe8b7a8-2dcb-4417-ab7d-b757eb60d9ee tempest-ServerDiagnosticsV248Test-363593752 tempest-ServerDiagnosticsV248Test-363593752-project-admin] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Retrieving diagnostics [ 1421.462215] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f0f6108-254c-498d-904a-ea908e1c5fd5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.550362] env[62510]: DEBUG oslo_vmware.api [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768268, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.568456] env[62510]: DEBUG oslo_concurrency.lockutils [None req-42699163-39ad-444c-adae-b2c783caa4e6 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1421.582933] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1421.672937] env[62510]: DEBUG nova.network.neutron [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Successfully updated port: dba71e12-5d92-43ea-ba7a-3e8b1fc8d6cb {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1421.716409] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e5058cc1-cd5d-4e2e-9524-0c01d4953961 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Acquiring lock "eb840df4-edc1-44cb-84c9-f31b7b56b6bd" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1421.716650] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e5058cc1-cd5d-4e2e-9524-0c01d4953961 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Lock "eb840df4-edc1-44cb-84c9-f31b7b56b6bd" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1421.716851] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e5058cc1-cd5d-4e2e-9524-0c01d4953961 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Acquiring lock "eb840df4-edc1-44cb-84c9-f31b7b56b6bd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1421.717040] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e5058cc1-cd5d-4e2e-9524-0c01d4953961 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Lock "eb840df4-edc1-44cb-84c9-f31b7b56b6bd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1421.717211] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e5058cc1-cd5d-4e2e-9524-0c01d4953961 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Lock "eb840df4-edc1-44cb-84c9-f31b7b56b6bd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1421.718799] env[62510]: DEBUG oslo_vmware.api [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52a624e8-d447-12b7-9524-b33ee44a506b, 'name': SearchDatastore_Task, 'duration_secs': 0.367076} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.719254] env[62510]: INFO nova.compute.manager [None req-e5058cc1-cd5d-4e2e-9524-0c01d4953961 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Terminating instance [ 1421.722483] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1421.722890] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1421.722975] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1421.723065] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1421.723243] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1421.726665] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-359d1e71-2f0a-430e-bad3-a4826a1c4a30 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.737543] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1421.737543] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1421.738592] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-337b11bb-1fe2-40f7-bb7c-e0a357ebd35b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.746055] env[62510]: DEBUG oslo_vmware.api [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1421.746055] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52d50447-9890-e2a3-ef02-0d3bc7e30381" [ 1421.746055] env[62510]: _type = "Task" [ 1421.746055] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.751955] env[62510]: DEBUG oslo_vmware.api [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52d50447-9890-e2a3-ef02-0d3bc7e30381, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.762829] env[62510]: DEBUG oslo_vmware.api [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768267, 'name': PowerOnVM_Task, 'duration_secs': 0.930116} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.763103] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1421.763307] env[62510]: INFO nova.compute.manager [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Took 12.34 seconds to spawn the instance on the hypervisor. [ 1421.763494] env[62510]: DEBUG nova.compute.manager [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1421.764299] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-732942e3-2a82-4a04-a3ef-6ff84f540754 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.795630] env[62510]: DEBUG oslo_vmware.api [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768269, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.499763} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.795630] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1421.795630] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1048bf26-c6f7-4e0a-a3bf-290cfb758096 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.822393] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Reconfiguring VM instance instance-0000000c to attach disk [datastore1] 35a98028-0fc6-4e13-b50d-5dacf205dbe5/35a98028-0fc6-4e13-b50d-5dacf205dbe5.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1421.825427] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a7aa3c33-03b1-41a2-9db0-8267f3cb83e9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.849864] env[62510]: DEBUG oslo_vmware.api [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768270, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.851405] env[62510]: DEBUG oslo_vmware.api [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Waiting for the task: (returnval){ [ 1421.851405] env[62510]: value = "task-1768271" [ 1421.851405] env[62510]: _type = "Task" [ 1421.851405] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.859195] env[62510]: DEBUG oslo_vmware.api [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768271, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.887091] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance cfe53f9c-d78b-4af7-b991-f3549c03f22d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1422.055605] env[62510]: DEBUG oslo_vmware.api [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768268, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.124674] env[62510]: DEBUG nova.network.neutron [req-429e9dc1-e7ae-4db0-911c-d7139c098c74 req-4fb3eb18-d348-4b17-a5dc-8b4acb888634 service nova] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Updated VIF entry in instance network info cache for port 210d5dee-24d1-4f38-b4b0-d1b78b6180ed. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1422.125105] env[62510]: DEBUG nova.network.neutron [req-429e9dc1-e7ae-4db0-911c-d7139c098c74 req-4fb3eb18-d348-4b17-a5dc-8b4acb888634 service nova] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Updating instance_info_cache with network_info: [{"id": "210d5dee-24d1-4f38-b4b0-d1b78b6180ed", "address": "fa:16:3e:13:c0:2d", "network": {"id": "eb00b6bc-924a-4070-bdac-d34d8e726329", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1974390299-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c852b1f21b054fd0b6961685dcf528f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19671de9-8b5b-4710-adc3-7419f3c0f171", "external-id": "nsx-vlan-transportzone-421", "segmentation_id": 421, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap210d5dee-24", "ovs_interfaceid": "210d5dee-24d1-4f38-b4b0-d1b78b6180ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1422.176221] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Acquiring lock "refresh_cache-34a464e2-d38e-4c24-a487-c62a4f484667" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1422.176392] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Acquired lock "refresh_cache-34a464e2-d38e-4c24-a487-c62a4f484667" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1422.176553] env[62510]: DEBUG nova.network.neutron [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1422.226986] env[62510]: DEBUG nova.compute.manager [None req-e5058cc1-cd5d-4e2e-9524-0c01d4953961 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1422.229709] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e5058cc1-cd5d-4e2e-9524-0c01d4953961 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1422.230774] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-895c4968-35ed-4085-853b-1cf547763f01 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.240619] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5058cc1-cd5d-4e2e-9524-0c01d4953961 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1422.241092] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7cd2ffbf-82d3-4967-ac82-7c049d5d299e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.250884] env[62510]: DEBUG oslo_vmware.api [None req-e5058cc1-cd5d-4e2e-9524-0c01d4953961 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Waiting for the task: (returnval){ [ 1422.250884] env[62510]: value = "task-1768272" [ 1422.250884] env[62510]: _type = "Task" [ 1422.250884] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.263096] env[62510]: DEBUG oslo_vmware.api [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52d50447-9890-e2a3-ef02-0d3bc7e30381, 'name': SearchDatastore_Task, 'duration_secs': 0.010702} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.264193] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dca97497-e1fb-4670-b08b-31ab9ae3e8f6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.271042] env[62510]: DEBUG oslo_vmware.api [None req-e5058cc1-cd5d-4e2e-9524-0c01d4953961 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': task-1768272, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.281572] env[62510]: DEBUG oslo_vmware.api [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1422.281572] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52cb91a6-2cc3-6635-0e91-8d493c9207cc" [ 1422.281572] env[62510]: _type = "Task" [ 1422.281572] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.289905] env[62510]: INFO nova.compute.manager [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Took 32.71 seconds to build instance. [ 1422.298032] env[62510]: DEBUG oslo_vmware.api [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52cb91a6-2cc3-6635-0e91-8d493c9207cc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.336885] env[62510]: DEBUG oslo_vmware.api [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768270, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.367846] env[62510]: DEBUG oslo_vmware.api [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768271, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.392101] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 0a940fd0-73cc-403d-9afc-a989c67dfdef has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1422.392101] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 12768001-6ed0-47be-8f20-c59ee82b842a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.392101] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Migration a3d89ea5-941d-4795-af70-8061e49c8be5 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1422.392101] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 35a98028-0fc6-4e13-b50d-5dacf205dbe5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.392101] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 75e06a24-b96c-4a42-bc2d-b0b960e3301a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.392101] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance c7d875ee-2b9c-48e4-9bf9-f7602e75ec62 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.558846] env[62510]: DEBUG oslo_vmware.api [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768268, 'name': ReconfigVM_Task, 'duration_secs': 1.367652} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.558846] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Reconfigured VM instance instance-0000000b to attach disk [datastore1] 8ffa27e9-6a3b-48d1-aed4-c808089788d9/8ffa27e9-6a3b-48d1-aed4-c808089788d9.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1422.558846] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c5cde181-6c46-48a1-aa5c-01d621fe24d8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.564457] env[62510]: DEBUG oslo_vmware.api [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1422.564457] env[62510]: value = "task-1768273" [ 1422.564457] env[62510]: _type = "Task" [ 1422.564457] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.573986] env[62510]: DEBUG oslo_vmware.api [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768273, 'name': Rename_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.632031] env[62510]: DEBUG oslo_concurrency.lockutils [req-429e9dc1-e7ae-4db0-911c-d7139c098c74 req-4fb3eb18-d348-4b17-a5dc-8b4acb888634 service nova] Releasing lock "refresh_cache-35a98028-0fc6-4e13-b50d-5dacf205dbe5" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1422.632031] env[62510]: DEBUG nova.compute.manager [req-429e9dc1-e7ae-4db0-911c-d7139c098c74 req-4fb3eb18-d348-4b17-a5dc-8b4acb888634 service nova] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Received event network-vif-plugged-a6e31bab-0459-42fe-8756-d37cc3fa3e88 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1422.632031] env[62510]: DEBUG oslo_concurrency.lockutils [req-429e9dc1-e7ae-4db0-911c-d7139c098c74 req-4fb3eb18-d348-4b17-a5dc-8b4acb888634 service nova] Acquiring lock "4e735bb6-f167-4c2b-b44e-d2dd3040603d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1422.632031] env[62510]: DEBUG oslo_concurrency.lockutils [req-429e9dc1-e7ae-4db0-911c-d7139c098c74 req-4fb3eb18-d348-4b17-a5dc-8b4acb888634 service nova] Lock "4e735bb6-f167-4c2b-b44e-d2dd3040603d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1422.632031] env[62510]: DEBUG oslo_concurrency.lockutils [req-429e9dc1-e7ae-4db0-911c-d7139c098c74 req-4fb3eb18-d348-4b17-a5dc-8b4acb888634 service nova] Lock "4e735bb6-f167-4c2b-b44e-d2dd3040603d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1422.632031] env[62510]: DEBUG nova.compute.manager [req-429e9dc1-e7ae-4db0-911c-d7139c098c74 req-4fb3eb18-d348-4b17-a5dc-8b4acb888634 service nova] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] No waiting events found dispatching network-vif-plugged-a6e31bab-0459-42fe-8756-d37cc3fa3e88 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1422.632031] env[62510]: WARNING nova.compute.manager [req-429e9dc1-e7ae-4db0-911c-d7139c098c74 req-4fb3eb18-d348-4b17-a5dc-8b4acb888634 service nova] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Received unexpected event network-vif-plugged-a6e31bab-0459-42fe-8756-d37cc3fa3e88 for instance with vm_state building and task_state spawning. [ 1422.632031] env[62510]: DEBUG nova.compute.manager [req-429e9dc1-e7ae-4db0-911c-d7139c098c74 req-4fb3eb18-d348-4b17-a5dc-8b4acb888634 service nova] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Received event network-changed-a6e31bab-0459-42fe-8756-d37cc3fa3e88 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1422.632031] env[62510]: DEBUG nova.compute.manager [req-429e9dc1-e7ae-4db0-911c-d7139c098c74 req-4fb3eb18-d348-4b17-a5dc-8b4acb888634 service nova] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Refreshing instance network info cache due to event network-changed-a6e31bab-0459-42fe-8756-d37cc3fa3e88. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1422.632031] env[62510]: DEBUG oslo_concurrency.lockutils [req-429e9dc1-e7ae-4db0-911c-d7139c098c74 req-4fb3eb18-d348-4b17-a5dc-8b4acb888634 service nova] Acquiring lock "refresh_cache-4e735bb6-f167-4c2b-b44e-d2dd3040603d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1422.632031] env[62510]: DEBUG oslo_concurrency.lockutils [req-429e9dc1-e7ae-4db0-911c-d7139c098c74 req-4fb3eb18-d348-4b17-a5dc-8b4acb888634 service nova] Acquired lock "refresh_cache-4e735bb6-f167-4c2b-b44e-d2dd3040603d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1422.632031] env[62510]: DEBUG nova.network.neutron [req-429e9dc1-e7ae-4db0-911c-d7139c098c74 req-4fb3eb18-d348-4b17-a5dc-8b4acb888634 service nova] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Refreshing network info cache for port a6e31bab-0459-42fe-8756-d37cc3fa3e88 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1422.719135] env[62510]: DEBUG nova.network.neutron [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1422.763104] env[62510]: DEBUG oslo_vmware.api [None req-e5058cc1-cd5d-4e2e-9524-0c01d4953961 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': task-1768272, 'name': PowerOffVM_Task, 'duration_secs': 0.38241} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.763623] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5058cc1-cd5d-4e2e-9524-0c01d4953961 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1422.764344] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e5058cc1-cd5d-4e2e-9524-0c01d4953961 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1422.764448] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2ab7cdfe-f125-4d31-b060-995a92296b98 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.796316] env[62510]: DEBUG oslo_concurrency.lockutils [None req-51bd2aca-ccb3-4233-8c3c-d7011096c09f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Lock "75e06a24-b96c-4a42-bc2d-b0b960e3301a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 34.231s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1422.797077] env[62510]: DEBUG oslo_vmware.api [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52cb91a6-2cc3-6635-0e91-8d493c9207cc, 'name': SearchDatastore_Task, 'duration_secs': 0.051387} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.798612] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1422.799917] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 4e735bb6-f167-4c2b-b44e-d2dd3040603d/4e735bb6-f167-4c2b-b44e-d2dd3040603d.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1422.802947] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e2b70999-17db-4ad6-bddd-b5a10272424a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.812981] env[62510]: DEBUG oslo_vmware.api [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1422.812981] env[62510]: value = "task-1768275" [ 1422.812981] env[62510]: _type = "Task" [ 1422.812981] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.824105] env[62510]: DEBUG oslo_vmware.api [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768275, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.838197] env[62510]: DEBUG oslo_vmware.api [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768270, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.032593} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.838533] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 5588650b-c450-489a-a456-3b580a5b9114/5588650b-c450-489a-a456-3b580a5b9114.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1422.838800] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1422.839444] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fe8c39fa-849a-494e-8260-7783aed98d9b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.847652] env[62510]: DEBUG oslo_vmware.api [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1422.847652] env[62510]: value = "task-1768276" [ 1422.847652] env[62510]: _type = "Task" [ 1422.847652] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.859914] env[62510]: DEBUG oslo_vmware.api [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768276, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.865618] env[62510]: DEBUG oslo_vmware.api [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768271, 'name': ReconfigVM_Task, 'duration_secs': 0.706095} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.865956] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Reconfigured VM instance instance-0000000c to attach disk [datastore1] 35a98028-0fc6-4e13-b50d-5dacf205dbe5/35a98028-0fc6-4e13-b50d-5dacf205dbe5.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1422.866671] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-13a1af45-bb78-44a7-bb2e-ec08e795fe8f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.874010] env[62510]: DEBUG oslo_vmware.api [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Waiting for the task: (returnval){ [ 1422.874010] env[62510]: value = "task-1768277" [ 1422.874010] env[62510]: _type = "Task" [ 1422.874010] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.883313] env[62510]: DEBUG oslo_vmware.api [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768277, 'name': Rename_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.898962] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 7490c825-dfd5-409c-9fd6-0e78643338fb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1422.899243] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance eb840df4-edc1-44cb-84c9-f31b7b56b6bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.899628] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 5588650b-c450-489a-a456-3b580a5b9114 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.936735] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e5058cc1-cd5d-4e2e-9524-0c01d4953961 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1422.936993] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e5058cc1-cd5d-4e2e-9524-0c01d4953961 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1422.937758] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5058cc1-cd5d-4e2e-9524-0c01d4953961 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Deleting the datastore file [datastore1] eb840df4-edc1-44cb-84c9-f31b7b56b6bd {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1422.938139] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d7f9d8e6-0414-4509-9376-5f8d71f4ef8d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.948614] env[62510]: DEBUG oslo_vmware.api [None req-e5058cc1-cd5d-4e2e-9524-0c01d4953961 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Waiting for the task: (returnval){ [ 1422.948614] env[62510]: value = "task-1768278" [ 1422.948614] env[62510]: _type = "Task" [ 1422.948614] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.956669] env[62510]: DEBUG oslo_vmware.api [None req-e5058cc1-cd5d-4e2e-9524-0c01d4953961 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': task-1768278, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.079620] env[62510]: DEBUG oslo_vmware.api [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768273, 'name': Rename_Task, 'duration_secs': 0.333914} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.079964] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1423.080202] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-577b5b2a-28b0-4fa4-b8cc-7e30bff45848 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.087262] env[62510]: DEBUG oslo_vmware.api [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1423.087262] env[62510]: value = "task-1768279" [ 1423.087262] env[62510]: _type = "Task" [ 1423.087262] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.098341] env[62510]: DEBUG oslo_vmware.api [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768279, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.207748] env[62510]: DEBUG nova.network.neutron [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Updating instance_info_cache with network_info: [{"id": "dba71e12-5d92-43ea-ba7a-3e8b1fc8d6cb", "address": "fa:16:3e:df:cf:5a", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.97", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdba71e12-5d", "ovs_interfaceid": "dba71e12-5d92-43ea-ba7a-3e8b1fc8d6cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1423.302226] env[62510]: DEBUG nova.compute.manager [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1423.329748] env[62510]: DEBUG oslo_vmware.api [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768275, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.358812] env[62510]: DEBUG oslo_vmware.api [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768276, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067859} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.359095] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1423.363102] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-879401f7-1bf5-4e85-8159-507ca80c35d2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.388976] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Reconfiguring VM instance instance-00000002 to attach disk [datastore1] 5588650b-c450-489a-a456-3b580a5b9114/5588650b-c450-489a-a456-3b580a5b9114.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1423.393826] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bc2a3c77-71e9-4d92-bcfa-32a7c2d11432 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.418471] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 7cc6d4a6-2765-44e7-b378-e213a562593d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1423.429308] env[62510]: DEBUG oslo_vmware.api [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768277, 'name': Rename_Task, 'duration_secs': 0.179698} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.430128] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1423.430448] env[62510]: DEBUG oslo_vmware.api [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1423.430448] env[62510]: value = "task-1768280" [ 1423.430448] env[62510]: _type = "Task" [ 1423.430448] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.430637] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-741e408e-8656-461d-babf-3ad030cfa102 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.446329] env[62510]: DEBUG oslo_vmware.api [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768280, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.446329] env[62510]: DEBUG oslo_vmware.api [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Waiting for the task: (returnval){ [ 1423.446329] env[62510]: value = "task-1768281" [ 1423.446329] env[62510]: _type = "Task" [ 1423.446329] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.463270] env[62510]: DEBUG oslo_vmware.api [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768281, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.467106] env[62510]: DEBUG oslo_vmware.api [None req-e5058cc1-cd5d-4e2e-9524-0c01d4953961 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': task-1768278, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.465614} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.467362] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5058cc1-cd5d-4e2e-9524-0c01d4953961 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1423.467579] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e5058cc1-cd5d-4e2e-9524-0c01d4953961 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1423.467766] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e5058cc1-cd5d-4e2e-9524-0c01d4953961 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1423.467944] env[62510]: INFO nova.compute.manager [None req-e5058cc1-cd5d-4e2e-9524-0c01d4953961 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Took 1.24 seconds to destroy the instance on the hypervisor. [ 1423.468239] env[62510]: DEBUG oslo.service.loopingcall [None req-e5058cc1-cd5d-4e2e-9524-0c01d4953961 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1423.470942] env[62510]: DEBUG nova.compute.manager [-] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1423.471071] env[62510]: DEBUG nova.network.neutron [-] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1423.607653] env[62510]: DEBUG oslo_vmware.api [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768279, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.661609] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Acquiring lock "a040671e-941d-4406-81af-f2f7a4b690e4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1423.661940] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Lock "a040671e-941d-4406-81af-f2f7a4b690e4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1423.713345] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Releasing lock "refresh_cache-34a464e2-d38e-4c24-a487-c62a4f484667" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1423.713345] env[62510]: DEBUG nova.compute.manager [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Instance network_info: |[{"id": "dba71e12-5d92-43ea-ba7a-3e8b1fc8d6cb", "address": "fa:16:3e:df:cf:5a", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.97", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdba71e12-5d", "ovs_interfaceid": "dba71e12-5d92-43ea-ba7a-3e8b1fc8d6cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1423.715542] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:df:cf:5a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '28d04eee-6dbb-491a-a999-b659c799679d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dba71e12-5d92-43ea-ba7a-3e8b1fc8d6cb', 'vif_model': 'vmxnet3'}] 
{{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1423.727792] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Creating folder: Project (1b2875fef23d486a900e5909a704c64b). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1423.728452] env[62510]: DEBUG nova.network.neutron [req-429e9dc1-e7ae-4db0-911c-d7139c098c74 req-4fb3eb18-d348-4b17-a5dc-8b4acb888634 service nova] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Updated VIF entry in instance network info cache for port a6e31bab-0459-42fe-8756-d37cc3fa3e88. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1423.728776] env[62510]: DEBUG nova.network.neutron [req-429e9dc1-e7ae-4db0-911c-d7139c098c74 req-4fb3eb18-d348-4b17-a5dc-8b4acb888634 service nova] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Updating instance_info_cache with network_info: [{"id": "a6e31bab-0459-42fe-8756-d37cc3fa3e88", "address": "fa:16:3e:7d:cb:3f", "network": {"id": "9b209a99-520e-436f-be97-fe37ae505518", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1482163995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86abf24d608d4c438161dc0b8335dea1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6e31bab-04", "ovs_interfaceid": "a6e31bab-0459-42fe-8756-d37cc3fa3e88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1423.734280] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-62ad00dc-b89c-4736-8892-47a8cdd28717 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.752352] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Created folder: Project (1b2875fef23d486a900e5909a704c64b) in parent group-v367197. [ 1423.752352] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Creating folder: Instances. Parent ref: group-v367234. 
{{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1423.752352] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d5f62492-71ea-4075-b271-e91dae520a57 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.762286] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Created folder: Instances in parent group-v367234. [ 1423.762609] env[62510]: DEBUG oslo.service.loopingcall [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1423.762892] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1423.763318] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ba0f301d-2094-4c93-a346-60f30af4e834 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.786554] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2a23db72-f5bd-4457-b7f5-db3925b8fa29 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Acquiring lock "731e7110-9709-4c4e-96d2-00e21e67c6e3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1423.786842] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2a23db72-f5bd-4457-b7f5-db3925b8fa29 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Lock "731e7110-9709-4c4e-96d2-00e21e67c6e3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1423.787070] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2a23db72-f5bd-4457-b7f5-db3925b8fa29 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Acquiring lock "731e7110-9709-4c4e-96d2-00e21e67c6e3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1423.787272] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2a23db72-f5bd-4457-b7f5-db3925b8fa29 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Lock "731e7110-9709-4c4e-96d2-00e21e67c6e3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1423.787444] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2a23db72-f5bd-4457-b7f5-db3925b8fa29 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Lock 
"731e7110-9709-4c4e-96d2-00e21e67c6e3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1423.790612] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1423.790612] env[62510]: value = "task-1768284" [ 1423.790612] env[62510]: _type = "Task" [ 1423.790612] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.792128] env[62510]: INFO nova.compute.manager [None req-2a23db72-f5bd-4457-b7f5-db3925b8fa29 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Terminating instance [ 1423.804903] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768284, 'name': CreateVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.824201] env[62510]: DEBUG oslo_vmware.api [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768275, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.558421} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.824488] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 4e735bb6-f167-4c2b-b44e-d2dd3040603d/4e735bb6-f167-4c2b-b44e-d2dd3040603d.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1423.824771] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1423.825041] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-42f94647-2be5-40c1-9104-ba25feed4cfc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.832686] env[62510]: DEBUG oslo_vmware.api [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1423.832686] env[62510]: value = "task-1768285" [ 1423.832686] env[62510]: _type = "Task" [ 1423.832686] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.842877] env[62510]: DEBUG oslo_vmware.api [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768285, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.844014] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1423.930521] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1423.950839] env[62510]: DEBUG oslo_vmware.api [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768280, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.969471] env[62510]: DEBUG oslo_vmware.api [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768281, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.100604] env[62510]: DEBUG oslo_vmware.api [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768279, 'name': PowerOnVM_Task, 'duration_secs': 0.897544} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.101092] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1424.101229] env[62510]: INFO nova.compute.manager [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Took 12.13 seconds to spawn the instance on the hypervisor. 
[ 1424.101463] env[62510]: DEBUG nova.compute.manager [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1424.102220] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-790c3289-176b-40bf-988b-884e7bccea6f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.238458] env[62510]: DEBUG oslo_concurrency.lockutils [req-429e9dc1-e7ae-4db0-911c-d7139c098c74 req-4fb3eb18-d348-4b17-a5dc-8b4acb888634 service nova] Releasing lock "refresh_cache-4e735bb6-f167-4c2b-b44e-d2dd3040603d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1424.295821] env[62510]: DEBUG nova.compute.manager [req-0f9a3dec-e3b3-44ab-b1a9-3b2cd85853ed req-2985b864-85ed-4199-a45f-3d96dd40aa41 service nova] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Received event network-vif-deleted-c9624a79-da6f-44aa-87fe-e5872f2e1d7d {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1424.296401] env[62510]: DEBUG nova.compute.manager [req-0f9a3dec-e3b3-44ab-b1a9-3b2cd85853ed req-2985b864-85ed-4199-a45f-3d96dd40aa41 service nova] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Received event network-vif-plugged-dba71e12-5d92-43ea-ba7a-3e8b1fc8d6cb {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1424.296747] env[62510]: DEBUG oslo_concurrency.lockutils [req-0f9a3dec-e3b3-44ab-b1a9-3b2cd85853ed req-2985b864-85ed-4199-a45f-3d96dd40aa41 service nova] Acquiring lock "34a464e2-d38e-4c24-a487-c62a4f484667-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1424.298157] env[62510]: DEBUG oslo_concurrency.lockutils [req-0f9a3dec-e3b3-44ab-b1a9-3b2cd85853ed req-2985b864-85ed-4199-a45f-3d96dd40aa41 service nova] Lock "34a464e2-d38e-4c24-a487-c62a4f484667-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1424.298157] env[62510]: DEBUG oslo_concurrency.lockutils [req-0f9a3dec-e3b3-44ab-b1a9-3b2cd85853ed req-2985b864-85ed-4199-a45f-3d96dd40aa41 service nova] Lock "34a464e2-d38e-4c24-a487-c62a4f484667-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1424.298157] env[62510]: DEBUG nova.compute.manager [req-0f9a3dec-e3b3-44ab-b1a9-3b2cd85853ed req-2985b864-85ed-4199-a45f-3d96dd40aa41 service nova] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] No waiting events found dispatching network-vif-plugged-dba71e12-5d92-43ea-ba7a-3e8b1fc8d6cb {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1424.298157] env[62510]: WARNING nova.compute.manager [req-0f9a3dec-e3b3-44ab-b1a9-3b2cd85853ed req-2985b864-85ed-4199-a45f-3d96dd40aa41 service nova] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Received unexpected event network-vif-plugged-dba71e12-5d92-43ea-ba7a-3e8b1fc8d6cb for instance with vm_state 
building and task_state spawning. [ 1424.298157] env[62510]: DEBUG nova.compute.manager [req-0f9a3dec-e3b3-44ab-b1a9-3b2cd85853ed req-2985b864-85ed-4199-a45f-3d96dd40aa41 service nova] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Received event network-changed-dba71e12-5d92-43ea-ba7a-3e8b1fc8d6cb {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1424.298157] env[62510]: DEBUG nova.compute.manager [req-0f9a3dec-e3b3-44ab-b1a9-3b2cd85853ed req-2985b864-85ed-4199-a45f-3d96dd40aa41 service nova] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Refreshing instance network info cache due to event network-changed-dba71e12-5d92-43ea-ba7a-3e8b1fc8d6cb. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1424.298498] env[62510]: DEBUG oslo_concurrency.lockutils [req-0f9a3dec-e3b3-44ab-b1a9-3b2cd85853ed req-2985b864-85ed-4199-a45f-3d96dd40aa41 service nova] Acquiring lock "refresh_cache-34a464e2-d38e-4c24-a487-c62a4f484667" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1424.298498] env[62510]: DEBUG oslo_concurrency.lockutils [req-0f9a3dec-e3b3-44ab-b1a9-3b2cd85853ed req-2985b864-85ed-4199-a45f-3d96dd40aa41 service nova] Acquired lock "refresh_cache-34a464e2-d38e-4c24-a487-c62a4f484667" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1424.298498] env[62510]: DEBUG nova.network.neutron [req-0f9a3dec-e3b3-44ab-b1a9-3b2cd85853ed req-2985b864-85ed-4199-a45f-3d96dd40aa41 service nova] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Refreshing network info cache for port dba71e12-5d92-43ea-ba7a-3e8b1fc8d6cb {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1424.301008] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2a23db72-f5bd-4457-b7f5-db3925b8fa29 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Acquiring lock "refresh_cache-731e7110-9709-4c4e-96d2-00e21e67c6e3" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1424.301008] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2a23db72-f5bd-4457-b7f5-db3925b8fa29 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Acquired lock "refresh_cache-731e7110-9709-4c4e-96d2-00e21e67c6e3" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1424.301008] env[62510]: DEBUG nova.network.neutron [None req-2a23db72-f5bd-4457-b7f5-db3925b8fa29 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1424.314240] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768284, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.344903] env[62510]: DEBUG oslo_vmware.api [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768285, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07782} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.344903] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1424.345690] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01667a2a-9739-41c6-899d-ff0c1bf0f014 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.369978] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Reconfiguring VM instance instance-0000000d to attach disk [datastore1] 4e735bb6-f167-4c2b-b44e-d2dd3040603d/4e735bb6-f167-4c2b-b44e-d2dd3040603d.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1424.370389] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7f206ffd-2315-499d-97c6-7c848d1a499f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.391394] env[62510]: DEBUG oslo_vmware.api [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1424.391394] env[62510]: value = "task-1768286" [ 1424.391394] env[62510]: _type = "Task" [ 1424.391394] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.400636] env[62510]: DEBUG oslo_vmware.api [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768286, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.436557] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance d42295c9-2b0e-471e-9a87-1d7367de9588 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1424.436732] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Total usable vcpus: 48, total allocated vcpus: 12 {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1424.436788] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2880MB phys_disk=200GB used_disk=12GB total_vcpus=48 used_vcpus=12 pci_stats=[] {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1424.457889] env[62510]: DEBUG oslo_vmware.api [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768280, 'name': ReconfigVM_Task, 'duration_secs': 0.592959} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.460048] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Reconfigured VM instance instance-00000002 to attach disk [datastore1] 5588650b-c450-489a-a456-3b580a5b9114/5588650b-c450-489a-a456-3b580a5b9114.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1424.461052] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ac5439d6-b9fd-4ee9-a70c-81a22d284994 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.472556] env[62510]: DEBUG oslo_vmware.api [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768281, 'name': PowerOnVM_Task, 'duration_secs': 0.735365} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.472974] env[62510]: DEBUG oslo_vmware.api [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1424.472974] env[62510]: value = "task-1768287" [ 1424.472974] env[62510]: _type = "Task" [ 1424.472974] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.473200] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1424.473909] env[62510]: INFO nova.compute.manager [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Took 9.79 seconds to spawn the instance on the hypervisor. 
[ 1424.473909] env[62510]: DEBUG nova.compute.manager [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1424.474444] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-def16237-0e9c-41ec-bf98-9cc2d81fecef {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.494467] env[62510]: DEBUG oslo_vmware.api [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768287, 'name': Rename_Task} progress is 10%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.625074] env[62510]: INFO nova.compute.manager [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Took 23.78 seconds to build instance. [ 1424.822345] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768284, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.856443] env[62510]: DEBUG nova.network.neutron [None req-2a23db72-f5bd-4457-b7f5-db3925b8fa29 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1424.902611] env[62510]: DEBUG oslo_vmware.api [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768286, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.945363] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14b6cb6f-e0e6-48f5-81f5-e27d608ba00c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.952795] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea7c15fe-47e1-4af0-8e72-99b283899c17 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.991817] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e240d5d6-84fd-448b-974d-d6d37a5c6780 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.004237] env[62510]: DEBUG oslo_vmware.api [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768287, 'name': Rename_Task, 'duration_secs': 0.326659} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.007164] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cb42def-6e1d-4622-b4fa-fbf4484cd31c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.011625] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1425.012224] env[62510]: INFO nova.compute.manager [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Took 20.57 seconds to build instance. [ 1425.013280] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-63cfa253-cfc2-4120-8756-bfbcc75f4cc5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.026163] env[62510]: DEBUG nova.compute.provider_tree [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1425.029555] env[62510]: DEBUG oslo_vmware.api [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1425.029555] env[62510]: value = "task-1768288" [ 1425.029555] env[62510]: _type = "Task" [ 1425.029555] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.039302] env[62510]: DEBUG oslo_vmware.api [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768288, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.126435] env[62510]: DEBUG oslo_concurrency.lockutils [None req-59a0c25e-7750-4fbb-9651-8e5dafe1fe68 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "8ffa27e9-6a3b-48d1-aed4-c808089788d9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.962s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1425.235146] env[62510]: DEBUG nova.network.neutron [None req-2a23db72-f5bd-4457-b7f5-db3925b8fa29 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1425.313915] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768284, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.372702] env[62510]: DEBUG nova.network.neutron [req-0f9a3dec-e3b3-44ab-b1a9-3b2cd85853ed req-2985b864-85ed-4199-a45f-3d96dd40aa41 service nova] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Updated VIF entry in instance network info cache for port dba71e12-5d92-43ea-ba7a-3e8b1fc8d6cb. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1425.373121] env[62510]: DEBUG nova.network.neutron [req-0f9a3dec-e3b3-44ab-b1a9-3b2cd85853ed req-2985b864-85ed-4199-a45f-3d96dd40aa41 service nova] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Updating instance_info_cache with network_info: [{"id": "dba71e12-5d92-43ea-ba7a-3e8b1fc8d6cb", "address": "fa:16:3e:df:cf:5a", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.97", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdba71e12-5d", "ovs_interfaceid": "dba71e12-5d92-43ea-ba7a-3e8b1fc8d6cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1425.380576] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Acquiring lock "aca56820-5a06-43dd-9d98-25421f7ef6a6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1425.383626] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Lock "aca56820-5a06-43dd-9d98-25421f7ef6a6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1425.402769] env[62510]: DEBUG oslo_vmware.api [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768286, 'name': ReconfigVM_Task, 'duration_secs': 0.676767} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.403331] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Reconfigured VM instance instance-0000000d to attach disk [datastore1] 4e735bb6-f167-4c2b-b44e-d2dd3040603d/4e735bb6-f167-4c2b-b44e-d2dd3040603d.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1425.404198] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0deed45c-9da1-4987-834d-8113ce3a140d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.411164] env[62510]: DEBUG oslo_vmware.api [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1425.411164] env[62510]: value = "task-1768289" [ 1425.411164] env[62510]: _type = "Task" [ 1425.411164] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.418261] env[62510]: DEBUG nova.compute.manager [req-2135c7c9-70ac-4784-b27b-701ae75acaa9 req-40f61c20-e20b-4392-bf90-c08e67fd68ae service nova] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Received event network-changed-f3011c4d-9d43-4939-9157-df0532a51861 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1425.418451] env[62510]: DEBUG nova.compute.manager [req-2135c7c9-70ac-4784-b27b-701ae75acaa9 req-40f61c20-e20b-4392-bf90-c08e67fd68ae service nova] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Refreshing instance network info cache due to event network-changed-f3011c4d-9d43-4939-9157-df0532a51861. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1425.418663] env[62510]: DEBUG oslo_concurrency.lockutils [req-2135c7c9-70ac-4784-b27b-701ae75acaa9 req-40f61c20-e20b-4392-bf90-c08e67fd68ae service nova] Acquiring lock "refresh_cache-612e95d6-28ef-4c9a-b5d9-fd83122bfa44" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1425.418807] env[62510]: DEBUG oslo_concurrency.lockutils [req-2135c7c9-70ac-4784-b27b-701ae75acaa9 req-40f61c20-e20b-4392-bf90-c08e67fd68ae service nova] Acquired lock "refresh_cache-612e95d6-28ef-4c9a-b5d9-fd83122bfa44" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1425.418967] env[62510]: DEBUG nova.network.neutron [req-2135c7c9-70ac-4784-b27b-701ae75acaa9 req-40f61c20-e20b-4392-bf90-c08e67fd68ae service nova] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Refreshing network info cache for port f3011c4d-9d43-4939-9157-df0532a51861 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1425.426933] env[62510]: DEBUG oslo_vmware.api [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768289, 'name': Rename_Task} progress is 10%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.516923] env[62510]: DEBUG oslo_concurrency.lockutils [None req-419f46ff-612d-4151-af08-a7ed2ec0f64c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Lock "35a98028-0fc6-4e13-b50d-5dacf205dbe5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.914s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1425.532720] env[62510]: DEBUG nova.scheduler.client.report [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1425.553431] env[62510]: DEBUG oslo_vmware.api [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768288, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.629321] env[62510]: DEBUG nova.compute.manager [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1425.725442] env[62510]: DEBUG nova.network.neutron [-] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1425.742059] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2a23db72-f5bd-4457-b7f5-db3925b8fa29 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Releasing lock "refresh_cache-731e7110-9709-4c4e-96d2-00e21e67c6e3" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1425.742509] env[62510]: DEBUG nova.compute.manager [None req-2a23db72-f5bd-4457-b7f5-db3925b8fa29 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1425.742968] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2a23db72-f5bd-4457-b7f5-db3925b8fa29 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1425.743708] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99e2fed5-b8ff-4a33-8c7b-78eb52a9d375 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.755655] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a23db72-f5bd-4457-b7f5-db3925b8fa29 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1425.755935] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ad34ea49-908f-4b59-aea7-810dd9c66fe1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.763622] env[62510]: DEBUG oslo_vmware.api [None req-2a23db72-f5bd-4457-b7f5-db3925b8fa29 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Waiting for the task: (returnval){ [ 1425.763622] env[62510]: value = "task-1768290" [ 1425.763622] env[62510]: _type = "Task" [ 1425.763622] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.780460] env[62510]: DEBUG oslo_vmware.api [None req-2a23db72-f5bd-4457-b7f5-db3925b8fa29 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Task: {'id': task-1768290, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.818140] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768284, 'name': CreateVM_Task, 'duration_secs': 1.596474} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.818418] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1425.819253] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1425.820017] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1425.820017] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1425.820433] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5df59e8-64cb-4e65-b2eb-6becac5342d9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.826523] env[62510]: DEBUG oslo_vmware.api [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Waiting for the task: (returnval){ [ 1425.826523] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52604b90-302e-58d1-4fb1-bcff9b84c552" [ 1425.826523] env[62510]: _type = "Task" [ 1425.826523] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.836530] env[62510]: DEBUG oslo_vmware.api [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52604b90-302e-58d1-4fb1-bcff9b84c552, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.876937] env[62510]: DEBUG oslo_concurrency.lockutils [req-0f9a3dec-e3b3-44ab-b1a9-3b2cd85853ed req-2985b864-85ed-4199-a45f-3d96dd40aa41 service nova] Releasing lock "refresh_cache-34a464e2-d38e-4c24-a487-c62a4f484667" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1425.925329] env[62510]: DEBUG oslo_vmware.api [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768289, 'name': Rename_Task, 'duration_secs': 0.180892} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.925609] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1425.926689] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9483103d-6d60-4dfd-950c-ca28080dbf19 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.934825] env[62510]: DEBUG oslo_vmware.api [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1425.934825] env[62510]: value = "task-1768291" [ 1425.934825] env[62510]: _type = "Task" [ 1425.934825] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.948449] env[62510]: DEBUG oslo_vmware.api [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768291, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.020015] env[62510]: DEBUG nova.compute.manager [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1426.037349] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62510) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1426.040617] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 8.229s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1426.045383] env[62510]: DEBUG oslo_concurrency.lockutils [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.397s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1426.053541] env[62510]: INFO nova.compute.claims [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1426.067409] env[62510]: DEBUG oslo_vmware.api [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768288, 'name': PowerOnVM_Task, 'duration_secs': 0.682736} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.067409] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1426.067409] env[62510]: DEBUG nova.compute.manager [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1426.067587] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e61b4403-b1e3-40e6-b396-e77d985dfe97 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.168356] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1426.230126] env[62510]: INFO nova.compute.manager [-] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Took 2.76 seconds to deallocate network for instance. 
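
The entries above repeatedly exercise two library mechanisms. First, every vCenter operation (Rename_Task, PowerOnVM_Task, SearchDatastore_Task, and so on) is submitted through oslo.vmware and then polled until it finishes, which is what produces the "Waiting for the task", "progress is N%" and "completed successfully" lines. A minimal sketch of that submit-then-poll pattern, assuming oslo.vmware is installed; the host, credentials and VM reference below are placeholders, not values from this log:

    # Sketch only: the submit-then-poll pattern behind the *_Task entries above.
    from oslo_vmware import api


    def rename_and_power_on(session, vm_ref, new_name):
        # invoke_api() issues the SOAP call and returns a task reference;
        # wait_for_task() polls it, logging progress until it completes.
        task = session.invoke_api(session.vim, 'Rename_Task', vm_ref,
                                  newName=new_name)
        session.wait_for_task(task)
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        session.wait_for_task(task)


    # Positional arguments: host, username, password, API retry count and task
    # poll interval (see the oslo.vmware docs for the full constructor signature).
    session = api.VMwareAPISession('vcenter.example.org', 'user', 'secret', 10, 0.5)

Second, the "Acquiring lock / Acquired lock / Releasing lock" lines and the "acquired by ... waited Ns / released by ... held Ns" lines come from oslo.concurrency's lockutils: the former is the plain lock context manager (the refresh_cache-* locks), the latter is the timing added by the synchronized decorator's wrapper (the compute_resources lock around methods such as ResourceTracker.instance_claim). A short sketch of both forms, with lock names taken from the log:

    from oslo_concurrency import lockutils

    # Context-manager form: produces the "Acquiring lock / Acquired lock /
    # Releasing lock" DEBUG entries.
    with lockutils.lock('refresh_cache-612e95d6-28ef-4c9a-b5d9-fd83122bfa44'):
        pass  # critical section, e.g. refreshing the instance network info cache

    # Decorator form: produces the "acquired by ... waited / released ... held"
    # entries that time how long the caller queued for and then held the lock.
    @lockutils.synchronized('compute_resources')
    def instance_claim():
        pass  # e.g. the resource tracker claiming resources for a new instance
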
[ 1426.279289] env[62510]: DEBUG oslo_vmware.api [None req-2a23db72-f5bd-4457-b7f5-db3925b8fa29 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Task: {'id': task-1768290, 'name': PowerOffVM_Task, 'duration_secs': 0.184374} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.279601] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a23db72-f5bd-4457-b7f5-db3925b8fa29 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1426.279774] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2a23db72-f5bd-4457-b7f5-db3925b8fa29 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1426.280400] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3c2ae72a-3989-4e06-ab87-76ef1d8b13af {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.306067] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2a23db72-f5bd-4457-b7f5-db3925b8fa29 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1426.306280] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2a23db72-f5bd-4457-b7f5-db3925b8fa29 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1426.306669] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a23db72-f5bd-4457-b7f5-db3925b8fa29 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Deleting the datastore file [datastore1] 731e7110-9709-4c4e-96d2-00e21e67c6e3 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1426.306954] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f71be061-ea83-4112-9217-f3003ca5b3a8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.317174] env[62510]: DEBUG oslo_vmware.api [None req-2a23db72-f5bd-4457-b7f5-db3925b8fa29 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Waiting for the task: (returnval){ [ 1426.317174] env[62510]: value = "task-1768293" [ 1426.317174] env[62510]: _type = "Task" [ 1426.317174] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.324726] env[62510]: DEBUG oslo_vmware.api [None req-2a23db72-f5bd-4457-b7f5-db3925b8fa29 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Task: {'id': task-1768293, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.337244] env[62510]: DEBUG oslo_vmware.api [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52604b90-302e-58d1-4fb1-bcff9b84c552, 'name': SearchDatastore_Task, 'duration_secs': 0.011569} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.337713] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1426.337969] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1426.338469] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1426.338469] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1426.339517] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1426.339517] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1ddf5c26-45b4-4afc-b2dc-5de584458cd2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.355904] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Created 
directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1426.356106] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1426.356913] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9038de53-58ae-4b42-9c8a-0510b26fe11b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.362731] env[62510]: DEBUG oslo_vmware.api [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Waiting for the task: (returnval){ [ 1426.362731] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52169a5f-6691-cb97-5a70-1cf88c052230" [ 1426.362731] env[62510]: _type = "Task" [ 1426.362731] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.371444] env[62510]: DEBUG oslo_vmware.api [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52169a5f-6691-cb97-5a70-1cf88c052230, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.379487] env[62510]: DEBUG nova.network.neutron [req-2135c7c9-70ac-4784-b27b-701ae75acaa9 req-40f61c20-e20b-4392-bf90-c08e67fd68ae service nova] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Updated VIF entry in instance network info cache for port f3011c4d-9d43-4939-9157-df0532a51861. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1426.379793] env[62510]: DEBUG nova.network.neutron [req-2135c7c9-70ac-4784-b27b-701ae75acaa9 req-40f61c20-e20b-4392-bf90-c08e67fd68ae service nova] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Updating instance_info_cache with network_info: [{"id": "f3011c4d-9d43-4939-9157-df0532a51861", "address": "fa:16:3e:33:a9:3e", "network": {"id": "1e2d2394-0caf-483c-8fdd-819cbb3d155f", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-79341046-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "44fbb61ad5364e0cb30d884cf96fe671", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92233552-2c0c-416e-9bf3-bfcca8eda2dc", "external-id": "nsx-vlan-transportzone-251", "segmentation_id": 251, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3011c4d-9d", "ovs_interfaceid": "f3011c4d-9d43-4939-9157-df0532a51861", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1426.447498] env[62510]: DEBUG oslo_vmware.api [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768291, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.551158] env[62510]: DEBUG oslo_concurrency.lockutils [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1426.589415] env[62510]: DEBUG oslo_concurrency.lockutils [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1426.739754] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e5058cc1-cd5d-4e2e-9524-0c01d4953961 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1426.831639] env[62510]: DEBUG oslo_vmware.api [None req-2a23db72-f5bd-4457-b7f5-db3925b8fa29 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Task: {'id': task-1768293, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.12609} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.832378] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a23db72-f5bd-4457-b7f5-db3925b8fa29 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1426.833370] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2a23db72-f5bd-4457-b7f5-db3925b8fa29 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1426.834087] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2a23db72-f5bd-4457-b7f5-db3925b8fa29 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1426.834422] env[62510]: INFO nova.compute.manager [None req-2a23db72-f5bd-4457-b7f5-db3925b8fa29 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Took 1.09 seconds to destroy the instance on the hypervisor. [ 1426.835324] env[62510]: DEBUG oslo.service.loopingcall [None req-2a23db72-f5bd-4457-b7f5-db3925b8fa29 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1426.835324] env[62510]: DEBUG nova.compute.manager [-] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1426.835877] env[62510]: DEBUG nova.network.neutron [-] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1426.860201] env[62510]: DEBUG nova.network.neutron [-] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1426.874632] env[62510]: DEBUG oslo_vmware.api [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52169a5f-6691-cb97-5a70-1cf88c052230, 'name': SearchDatastore_Task, 'duration_secs': 0.009352} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.875455] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6d15faf-7b94-445f-8813-d09fa8583d08 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.881552] env[62510]: DEBUG oslo_vmware.api [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Waiting for the task: (returnval){ [ 1426.881552] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52b23649-244d-74ed-854e-47f33989903a" [ 1426.881552] env[62510]: _type = "Task" [ 1426.881552] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.882136] env[62510]: DEBUG oslo_concurrency.lockutils [req-2135c7c9-70ac-4784-b27b-701ae75acaa9 req-40f61c20-e20b-4392-bf90-c08e67fd68ae service nova] Releasing lock "refresh_cache-612e95d6-28ef-4c9a-b5d9-fd83122bfa44" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1426.891420] env[62510]: DEBUG oslo_vmware.api [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52b23649-244d-74ed-854e-47f33989903a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.950047] env[62510]: DEBUG oslo_vmware.api [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768291, 'name': PowerOnVM_Task, 'duration_secs': 0.59861} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.950393] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1426.950601] env[62510]: INFO nova.compute.manager [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Took 9.62 seconds to spawn the instance on the hypervisor. [ 1426.950920] env[62510]: DEBUG nova.compute.manager [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1426.951753] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-525d7dfa-2a0e-49dd-a30d-01590aec142b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.046456] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1427.046756] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1427.046938] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Starting heal instance info cache {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 1427.047070] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Rebuilding the list of instances to heal {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10313}} [ 1427.366864] env[62510]: DEBUG nova.network.neutron [-] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1427.400310] env[62510]: DEBUG oslo_vmware.api [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52b23649-244d-74ed-854e-47f33989903a, 'name': SearchDatastore_Task, 'duration_secs': 0.011782} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.400708] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1427.400807] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 34a464e2-d38e-4c24-a487-c62a4f484667/34a464e2-d38e-4c24-a487-c62a4f484667.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1427.401082] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5fa5bd7b-0be3-4751-860e-a59c562ea893 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.411880] env[62510]: DEBUG oslo_vmware.api [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Waiting for the task: (returnval){ [ 1427.411880] env[62510]: value = "task-1768294" [ 1427.411880] env[62510]: _type = "Task" [ 1427.411880] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.419823] env[62510]: DEBUG oslo_vmware.api [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': task-1768294, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.474449] env[62510]: INFO nova.compute.manager [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Took 22.83 seconds to build instance. 
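
The surrounding entries trace two vSphere call sequences: spawning an instance from the datastore image cache (search the cache, copy the cached VMDK into the instance directory, extend the root disk, reconfigure the VM to attach it, rename it, power it on) and tearing one down (power off, unregister, delete the instance's datastore directory, deallocate the network). A condensed illustration of the spawn order, reusing the oslo.vmware session pattern sketched earlier; this is a sketch with placeholder arguments, not the actual nova.virt.vmwareapi code, and config_spec is assumed to be a prebuilt VirtualMachineConfigSpec describing the disk attachment:

    def spawn_from_cached_image(session, dc_ref, vm_ref, cached_vmdk,
                                instance_vmdk, root_gb, config_spec):
        disk_mgr = session.vim.service_content.virtualDiskManager

        # CopyVirtualDisk_Task: devstack-image-cache_base/<image>.vmdk
        # -> <instance uuid>/<instance uuid>.vmdk, as logged above.
        task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                                  sourceName=cached_vmdk, sourceDatacenter=dc_ref,
                                  destName=instance_vmdk, destDatacenter=dc_ref)
        session.wait_for_task(task)

        # ExtendVirtualDisk_Task: grow the copied root disk to the flavor size
        # (the "Extending root virtual disk to 1048576" entry is in KB, i.e. 1 GB).
        task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
                                  name=instance_vmdk, datacenter=dc_ref,
                                  newCapacityKb=root_gb * 1024 * 1024,
                                  eagerZero=False)
        session.wait_for_task(task)

        # ReconfigVM_Task: attach the disk ("Reconfigured VM instance ... to
        # attach disk ..."); Rename_Task and PowerOnVM_Task then follow as in
        # the earlier sketch.
        task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref,
                                  spec=config_spec)
        session.wait_for_task(task)

Likewise, a placeholder-argument sketch of the teardown order recorded for instance 731e7110-9709-4c4e-96d2-00e21e67c6e3 (the network deallocation that follows it is handled separately by the compute manager):

    def destroy_vm(session, dc_ref, vm_ref, ds_path):
        # Power off, then remove the VM from the vCenter inventory;
        # UnregisterVM leaves the files on the datastore.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

        # Delete the instance directory, e.g. "[datastore1] 731e7110-...".
        file_mgr = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_mgr, name=ds_path, datacenter=dc_ref)
        session.wait_for_task(task)
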
[ 1427.543956] env[62510]: DEBUG nova.compute.manager [req-285a5270-1b27-4fe0-abf7-eabd26653db7 req-86563a5d-9086-419f-be8a-f293afcfb45f service nova] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Received event network-vif-deleted-e83f36f6-e38c-49b4-b419-59f9030e6005 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1427.544194] env[62510]: DEBUG nova.compute.manager [req-285a5270-1b27-4fe0-abf7-eabd26653db7 req-86563a5d-9086-419f-be8a-f293afcfb45f service nova] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Received event network-vif-deleted-ca5eb991-9338-4e3a-8dcc-322896c420df {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1427.544361] env[62510]: DEBUG nova.compute.manager [req-285a5270-1b27-4fe0-abf7-eabd26653db7 req-86563a5d-9086-419f-be8a-f293afcfb45f service nova] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Received event network-vif-deleted-55cae8a5-e495-4d62-a2c0-b2effaf346ec {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1427.554878] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Skipping network cache update for instance because it is being deleted. {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10326}} [ 1427.555275] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Skipping network cache update for instance because it is Building. {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10322}} [ 1427.555450] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Skipping network cache update for instance because it is Building. 
{{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10322}} [ 1427.577224] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "refresh_cache-0604d37b-38c5-4510-894e-b26fd44e17c5" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1427.577404] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquired lock "refresh_cache-0604d37b-38c5-4510-894e-b26fd44e17c5" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1427.577575] env[62510]: DEBUG nova.network.neutron [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Forcefully refreshing network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1427.577755] env[62510]: DEBUG nova.objects.instance [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lazy-loading 'info_cache' on Instance uuid 0604d37b-38c5-4510-894e-b26fd44e17c5 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1427.592404] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-774043dd-aab2-467d-8af5-cfd4cf092946 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.603260] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aee4642e-a3a1-41df-93a8-c03c7e7fd3e5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.645968] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-261c05a4-b398-4786-b3a1-0933a8e269a3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.654114] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ce1f47f-021c-4238-bf8b-0a6bea47858e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.669907] env[62510]: DEBUG nova.compute.provider_tree [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1427.874262] env[62510]: INFO nova.compute.manager [-] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Took 1.04 seconds to deallocate network for instance. [ 1427.927270] env[62510]: DEBUG oslo_vmware.api [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': task-1768294, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.977652] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c083de90-654d-4fb7-a475-1fd3ffe3458a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "4e735bb6-f167-4c2b-b44e-d2dd3040603d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.238s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1428.175656] env[62510]: DEBUG nova.scheduler.client.report [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1428.387151] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2a23db72-f5bd-4457-b7f5-db3925b8fa29 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1428.422412] env[62510]: DEBUG oslo_vmware.api [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': task-1768294, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.642252} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.422845] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 34a464e2-d38e-4c24-a487-c62a4f484667/34a464e2-d38e-4c24-a487-c62a4f484667.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1428.422972] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1428.423569] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-12925a6a-94b4-4ae5-a9bf-cd681ad0ecb9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.430840] env[62510]: DEBUG oslo_vmware.api [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Waiting for the task: (returnval){ [ 1428.430840] env[62510]: value = "task-1768295" [ 1428.430840] env[62510]: _type = "Task" [ 1428.430840] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.438942] env[62510]: DEBUG oslo_vmware.api [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': task-1768295, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.482766] env[62510]: DEBUG nova.compute.manager [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1428.607390] env[62510]: DEBUG nova.network.neutron [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1428.683142] env[62510]: DEBUG oslo_concurrency.lockutils [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.638s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1428.683733] env[62510]: DEBUG nova.compute.manager [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1428.690302] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2c798ac7-77b8-4e3f-86fe-9cd211482b07 tempest-DeleteServersAdminTestJSON-1087720554 tempest-DeleteServersAdminTestJSON-1087720554-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.104s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1428.690302] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2c798ac7-77b8-4e3f-86fe-9cd211482b07 tempest-DeleteServersAdminTestJSON-1087720554 tempest-DeleteServersAdminTestJSON-1087720554-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1428.691442] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 19.927s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1428.718177] env[62510]: INFO nova.scheduler.client.report [None req-2c798ac7-77b8-4e3f-86fe-9cd211482b07 tempest-DeleteServersAdminTestJSON-1087720554 tempest-DeleteServersAdminTestJSON-1087720554-project-admin] Deleted allocations for instance 0604d37b-38c5-4510-894e-b26fd44e17c5 [ 1428.944756] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e13b2b75-f901-45b9-9182-0913b2474a10 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "738d69b5-86b3-4f19-8291-9d38e7fab483" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1428.945147] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e13b2b75-f901-45b9-9182-0913b2474a10 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "738d69b5-86b3-4f19-8291-9d38e7fab483" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1428.952863] env[62510]: DEBUG oslo_vmware.api [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': task-1768295, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071955} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.953149] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1428.954242] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8c7bd24-670b-49e5-b41d-f002eca02df9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.982696] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Reconfiguring VM instance instance-0000000e to attach disk [datastore1] 34a464e2-d38e-4c24-a487-c62a4f484667/34a464e2-d38e-4c24-a487-c62a4f484667.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1428.983034] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-679801c0-a425-41a2-aa3f-842b5dc6cde3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.015510] env[62510]: DEBUG oslo_vmware.api [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Waiting for the task: (returnval){ [ 1429.015510] env[62510]: value = "task-1768296" [ 1429.015510] env[62510]: _type = "Task" [ 1429.015510] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.030830] env[62510]: DEBUG oslo_vmware.api [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': task-1768296, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.032151] env[62510]: DEBUG oslo_concurrency.lockutils [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1429.192352] env[62510]: DEBUG nova.compute.utils [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1429.192530] env[62510]: DEBUG nova.compute.manager [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Not allocating networking since 'none' was specified. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1429.201939] env[62510]: INFO nova.compute.claims [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1429.231401] env[62510]: DEBUG nova.network.neutron [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1429.232741] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2c798ac7-77b8-4e3f-86fe-9cd211482b07 tempest-DeleteServersAdminTestJSON-1087720554 tempest-DeleteServersAdminTestJSON-1087720554-project-admin] Lock "0604d37b-38c5-4510-894e-b26fd44e17c5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.331s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1429.533212] env[62510]: DEBUG oslo_vmware.api [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': task-1768296, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.696679] env[62510]: DEBUG nova.compute.manager [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Start building block device mappings for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1429.709272] env[62510]: INFO nova.compute.resource_tracker [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Updating resource usage from migration a3d89ea5-941d-4795-af70-8061e49c8be5 [ 1429.734885] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Releasing lock "refresh_cache-0604d37b-38c5-4510-894e-b26fd44e17c5" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1429.735699] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Updated the network info_cache for instance {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10380}} [ 1429.735699] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1429.735828] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1429.736042] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1429.736274] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1430.034204] env[62510]: DEBUG oslo_vmware.api [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': task-1768296, 'name': ReconfigVM_Task, 'duration_secs': 0.566195} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.034204] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Reconfigured VM instance instance-0000000e to attach disk [datastore1] 34a464e2-d38e-4c24-a487-c62a4f484667/34a464e2-d38e-4c24-a487-c62a4f484667.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1430.034204] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-55fc60a0-082f-411a-8102-837c380060c4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.039506] env[62510]: DEBUG oslo_vmware.api [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Waiting for the task: (returnval){ [ 1430.039506] env[62510]: value = "task-1768297" [ 1430.039506] env[62510]: _type = "Task" [ 1430.039506] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.053523] env[62510]: DEBUG oslo_vmware.api [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': task-1768297, 'name': Rename_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.362251] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bccdc05-d9e8-4ea6-896f-77ee2d2e37b4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.373926] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2c93d5e-699c-47e9-a881-4c76830d70b2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.415817] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22cb0578-8f15-43e2-b828-6e4eb18cfb11 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.429237] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43a9144c-2c8f-48e0-94c6-d2a3f20d9d87 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.445033] env[62510]: DEBUG nova.compute.provider_tree [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1430.552527] env[62510]: DEBUG oslo_vmware.api [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': task-1768297, 'name': Rename_Task, 'duration_secs': 0.133171} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.552919] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1430.553226] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e2eb1448-e485-4bbc-a263-7e677154f7ac {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.555222] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f46c7689-32a8-4938-b334-4e3028a6e8d8 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "5588650b-c450-489a-a456-3b580a5b9114" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1430.555462] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f46c7689-32a8-4938-b334-4e3028a6e8d8 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "5588650b-c450-489a-a456-3b580a5b9114" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1430.555694] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f46c7689-32a8-4938-b334-4e3028a6e8d8 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "5588650b-c450-489a-a456-3b580a5b9114-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1430.556281] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f46c7689-32a8-4938-b334-4e3028a6e8d8 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "5588650b-c450-489a-a456-3b580a5b9114-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1430.556281] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f46c7689-32a8-4938-b334-4e3028a6e8d8 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "5588650b-c450-489a-a456-3b580a5b9114-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1430.559088] env[62510]: INFO nova.compute.manager [None req-f46c7689-32a8-4938-b334-4e3028a6e8d8 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Terminating instance [ 1430.562547] env[62510]: DEBUG oslo_vmware.api [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Waiting for 
the task: (returnval){ [ 1430.562547] env[62510]: value = "task-1768298" [ 1430.562547] env[62510]: _type = "Task" [ 1430.562547] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.581292] env[62510]: DEBUG oslo_vmware.api [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': task-1768298, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.716802] env[62510]: DEBUG nova.compute.manager [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1430.767921] env[62510]: DEBUG nova.virt.hardware [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1430.768330] env[62510]: DEBUG nova.virt.hardware [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1430.768541] env[62510]: DEBUG nova.virt.hardware [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1430.768770] env[62510]: DEBUG nova.virt.hardware [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1430.768957] env[62510]: DEBUG nova.virt.hardware [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1430.769168] env[62510]: DEBUG nova.virt.hardware [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, 
threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1430.769429] env[62510]: DEBUG nova.virt.hardware [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1430.769623] env[62510]: DEBUG nova.virt.hardware [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1430.769825] env[62510]: DEBUG nova.virt.hardware [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1430.770540] env[62510]: DEBUG nova.virt.hardware [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1430.770540] env[62510]: DEBUG nova.virt.hardware [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1430.773834] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bb0c9d3-f1cb-49a2-b681-77311deb270d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.782162] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2516d82-eb7c-4ef3-a538-255f0e516cce {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.809873] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Instance VIF info [] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1430.816530] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Creating folder: Project (ca7d51a105e74e02a28122be7e57284b). Parent ref: group-v367197. 
{{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1430.816959] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-448e4ea4-4b64-4f1b-8eb6-66553f777d83 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.829615] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Created folder: Project (ca7d51a105e74e02a28122be7e57284b) in parent group-v367197. [ 1430.830341] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Creating folder: Instances. Parent ref: group-v367237. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1430.830341] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-36858a77-ebcb-4c7a-a1a7-855fdae6052c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.842141] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Created folder: Instances in parent group-v367237. [ 1430.842368] env[62510]: DEBUG oslo.service.loopingcall [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1430.842565] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1430.842776] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ffc03d93-4408-47d9-9172-cf9de143ae9d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.863216] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1430.863216] env[62510]: value = "task-1768301" [ 1430.863216] env[62510]: _type = "Task" [ 1430.863216] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.872177] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768301, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.949470] env[62510]: DEBUG nova.scheduler.client.report [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1431.077342] env[62510]: DEBUG nova.compute.manager [None req-f46c7689-32a8-4938-b334-4e3028a6e8d8 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1431.081429] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f46c7689-32a8-4938-b334-4e3028a6e8d8 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1431.081429] env[62510]: DEBUG oslo_vmware.api [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': task-1768298, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.081429] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffca5ea9-a6c4-4c80-b2fd-e5fe40f679cf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.095357] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-f46c7689-32a8-4938-b334-4e3028a6e8d8 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1431.095357] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4303f9dd-458f-45be-a2b8-c402a4da83d7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.100085] env[62510]: DEBUG oslo_vmware.api [None req-f46c7689-32a8-4938-b334-4e3028a6e8d8 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1431.100085] env[62510]: value = "task-1768302" [ 1431.100085] env[62510]: _type = "Task" [ 1431.100085] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.115606] env[62510]: DEBUG oslo_vmware.api [None req-f46c7689-32a8-4938-b334-4e3028a6e8d8 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768302, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.376542] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768301, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.457701] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.765s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1431.457701] env[62510]: INFO nova.compute.manager [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Migrating [ 1431.461169] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1431.461382] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquired lock "compute-rpcapi-router" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1431.462748] env[62510]: DEBUG oslo_concurrency.lockutils [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.655s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1431.467736] env[62510]: INFO nova.compute.claims [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1431.471144] env[62510]: INFO nova.compute.rpcapi [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Automatically selected compute RPC version 6.4 from minimum service version 68 [ 1431.471633] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Releasing lock "compute-rpcapi-router" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1431.575346] env[62510]: DEBUG oslo_vmware.api [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 
tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': task-1768298, 'name': PowerOnVM_Task, 'duration_secs': 0.571188} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.575686] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1431.576583] env[62510]: INFO nova.compute.manager [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Took 11.73 seconds to spawn the instance on the hypervisor. [ 1431.576830] env[62510]: DEBUG nova.compute.manager [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1431.577664] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ca918d0-bcea-4055-9c6c-c44d9225deae {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.611913] env[62510]: DEBUG oslo_vmware.api [None req-f46c7689-32a8-4938-b334-4e3028a6e8d8 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768302, 'name': PowerOffVM_Task, 'duration_secs': 0.290155} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.612267] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-f46c7689-32a8-4938-b334-4e3028a6e8d8 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1431.612432] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f46c7689-32a8-4938-b334-4e3028a6e8d8 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1431.612714] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9696e106-6d3c-448f-8032-73d9d5d87ada {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.687113] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f46c7689-32a8-4938-b334-4e3028a6e8d8 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1431.687341] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f46c7689-32a8-4938-b334-4e3028a6e8d8 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1431.687515] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-f46c7689-32a8-4938-b334-4e3028a6e8d8 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Deleting the datastore file [datastore1] 5588650b-c450-489a-a456-3b580a5b9114 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1431.687778] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-40c42318-7e77-4df5-b75c-dec0b45d354e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.695166] env[62510]: DEBUG oslo_vmware.api [None req-f46c7689-32a8-4938-b334-4e3028a6e8d8 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1431.695166] env[62510]: value = "task-1768304" [ 1431.695166] env[62510]: _type = "Task" [ 1431.695166] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.703665] env[62510]: DEBUG oslo_vmware.api [None req-f46c7689-32a8-4938-b334-4e3028a6e8d8 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768304, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.876593] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768301, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.991435] env[62510]: DEBUG oslo_concurrency.lockutils [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Acquiring lock "8e3cefa1-fab9-469e-8a32-31b4a8ecf4be" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1431.991680] env[62510]: DEBUG oslo_concurrency.lockutils [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Lock "8e3cefa1-fab9-469e-8a32-31b4a8ecf4be" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1431.992571] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquiring lock "refresh_cache-12768001-6ed0-47be-8f20-c59ee82b842a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1431.994479] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquired lock "refresh_cache-12768001-6ed0-47be-8f20-c59ee82b842a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1431.994479] env[62510]: DEBUG nova.network.neutron [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1432.096581] env[62510]: INFO nova.compute.manager [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Took 25.04 seconds to build instance. [ 1432.209235] env[62510]: DEBUG oslo_vmware.api [None req-f46c7689-32a8-4938-b334-4e3028a6e8d8 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768304, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.39299} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.209235] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-f46c7689-32a8-4938-b334-4e3028a6e8d8 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1432.209235] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f46c7689-32a8-4938-b334-4e3028a6e8d8 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1432.209235] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f46c7689-32a8-4938-b334-4e3028a6e8d8 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1432.209235] env[62510]: INFO nova.compute.manager [None req-f46c7689-32a8-4938-b334-4e3028a6e8d8 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1432.209235] env[62510]: DEBUG oslo.service.loopingcall [None req-f46c7689-32a8-4938-b334-4e3028a6e8d8 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1432.209235] env[62510]: DEBUG nova.compute.manager [-] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1432.209235] env[62510]: DEBUG nova.network.neutron [-] [instance: 5588650b-c450-489a-a456-3b580a5b9114] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1432.389611] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768301, 'name': CreateVM_Task, 'duration_secs': 1.325527} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.395585] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1432.399701] env[62510]: DEBUG oslo_concurrency.lockutils [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1432.400072] env[62510]: DEBUG oslo_concurrency.lockutils [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1432.400919] env[62510]: DEBUG oslo_concurrency.lockutils [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1432.401948] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a386f61-77f0-409a-9812-5fe8e50579f9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.410483] env[62510]: DEBUG oslo_vmware.api [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Waiting for the task: (returnval){ [ 1432.410483] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52604f39-c413-60a3-9820-c6d8b61bfa17" [ 1432.410483] env[62510]: _type = "Task" [ 1432.410483] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.420139] env[62510]: DEBUG oslo_vmware.api [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52604f39-c413-60a3-9820-c6d8b61bfa17, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.557803] env[62510]: DEBUG oslo_concurrency.lockutils [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Acquiring lock "b004fba7-13e0-40f0-827d-8d09b7717176" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1432.558061] env[62510]: DEBUG oslo_concurrency.lockutils [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Lock "b004fba7-13e0-40f0-827d-8d09b7717176" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1432.600943] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3f6c0e53-9530-46c9-974b-eaaff8ca3536 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Lock "34a464e2-d38e-4c24-a487-c62a4f484667" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.898s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1432.631143] env[62510]: DEBUG oslo_concurrency.lockutils [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "0158d7af-d3bb-4d9c-a7c6-fbab943977e2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1432.631441] env[62510]: DEBUG oslo_concurrency.lockutils [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "0158d7af-d3bb-4d9c-a7c6-fbab943977e2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1432.923747] env[62510]: DEBUG oslo_vmware.api [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52604f39-c413-60a3-9820-c6d8b61bfa17, 'name': SearchDatastore_Task, 'duration_secs': 0.01054} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.924091] env[62510]: DEBUG oslo_concurrency.lockutils [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1432.924607] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1432.924942] env[62510]: DEBUG oslo_concurrency.lockutils [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1432.925349] env[62510]: DEBUG oslo_concurrency.lockutils [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1432.925550] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1432.925992] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dfb935fe-5394-4f8e-927c-b20d4d0ee7d3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.951266] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1432.951266] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1432.951266] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10078c69-fd63-44f1-857d-69ebba091458 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.955908] env[62510]: DEBUG oslo_vmware.api [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Waiting for the task: (returnval){ [ 1432.955908] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]523bc9bc-03e7-4c4c-59b6-fd9e24a26066" [ 1432.955908] env[62510]: _type = "Task" [ 1432.955908] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.967349] env[62510]: DEBUG oslo_vmware.api [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]523bc9bc-03e7-4c4c-59b6-fd9e24a26066, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.980501] env[62510]: DEBUG nova.network.neutron [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Updating instance_info_cache with network_info: [{"id": "7489ebb6-ec5f-4097-9a62-81a2d3dedd52", "address": "fa:16:3e:81:65:65", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.149", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7489ebb6-ec", "ovs_interfaceid": "7489ebb6-ec5f-4097-9a62-81a2d3dedd52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1433.039043] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edf40b66-9f9a-4271-8b01-239e1ec5eb45 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.049538] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57dcd182-72a1-4a10-b914-4daa975b774c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.089595] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-087709c3-37e2-438e-821c-2cc9e5a3853c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.098305] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dddc460a-2f3a-4633-8b10-03e7c3839325 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.103383] env[62510]: DEBUG nova.compute.manager [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1433.119638] env[62510]: DEBUG nova.compute.provider_tree [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1433.333647] env[62510]: DEBUG nova.compute.manager [req-d2717ee3-cb7b-48e2-bdd6-8caccfdf6c68 req-b0dddc91-33e8-4547-9db4-45205a83ec77 service nova] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Received event network-changed-a6e31bab-0459-42fe-8756-d37cc3fa3e88 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1433.334279] env[62510]: DEBUG nova.compute.manager [req-d2717ee3-cb7b-48e2-bdd6-8caccfdf6c68 req-b0dddc91-33e8-4547-9db4-45205a83ec77 service nova] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Refreshing instance network info cache due to event network-changed-a6e31bab-0459-42fe-8756-d37cc3fa3e88. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1433.335344] env[62510]: DEBUG oslo_concurrency.lockutils [req-d2717ee3-cb7b-48e2-bdd6-8caccfdf6c68 req-b0dddc91-33e8-4547-9db4-45205a83ec77 service nova] Acquiring lock "refresh_cache-4e735bb6-f167-4c2b-b44e-d2dd3040603d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1433.335344] env[62510]: DEBUG oslo_concurrency.lockutils [req-d2717ee3-cb7b-48e2-bdd6-8caccfdf6c68 req-b0dddc91-33e8-4547-9db4-45205a83ec77 service nova] Acquired lock "refresh_cache-4e735bb6-f167-4c2b-b44e-d2dd3040603d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1433.335344] env[62510]: DEBUG nova.network.neutron [req-d2717ee3-cb7b-48e2-bdd6-8caccfdf6c68 req-b0dddc91-33e8-4547-9db4-45205a83ec77 service nova] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Refreshing network info cache for port a6e31bab-0459-42fe-8756-d37cc3fa3e88 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1434.203083] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Releasing lock "refresh_cache-12768001-6ed0-47be-8f20-c59ee82b842a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1434.206862] env[62510]: DEBUG nova.scheduler.client.report [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1434.209837] env[62510]: DEBUG nova.network.neutron [-] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1434.213516] env[62510]: DEBUG oslo_vmware.api [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]523bc9bc-03e7-4c4c-59b6-fd9e24a26066, 'name': SearchDatastore_Task, 'duration_secs': 0.026252} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1434.217160] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1248b4cf-9e7f-4bfd-a542-c5552c0cc19d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.224026] env[62510]: DEBUG oslo_vmware.api [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Waiting for the task: (returnval){ [ 1434.224026] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]528ce272-b4dc-2b9c-7582-8890d9b02ba0" [ 1434.224026] env[62510]: _type = "Task" [ 1434.224026] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1434.230317] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1434.235818] env[62510]: DEBUG oslo_vmware.api [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]528ce272-b4dc-2b9c-7582-8890d9b02ba0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.515747] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "e9711202-67f3-4fe2-befb-f28722ddea33" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1434.516176] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "e9711202-67f3-4fe2-befb-f28722ddea33" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1434.663094] env[62510]: DEBUG nova.network.neutron [req-d2717ee3-cb7b-48e2-bdd6-8caccfdf6c68 req-b0dddc91-33e8-4547-9db4-45205a83ec77 service nova] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Updated VIF entry in instance network info cache for port a6e31bab-0459-42fe-8756-d37cc3fa3e88. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1434.663494] env[62510]: DEBUG nova.network.neutron [req-d2717ee3-cb7b-48e2-bdd6-8caccfdf6c68 req-b0dddc91-33e8-4547-9db4-45205a83ec77 service nova] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Updating instance_info_cache with network_info: [{"id": "a6e31bab-0459-42fe-8756-d37cc3fa3e88", "address": "fa:16:3e:7d:cb:3f", "network": {"id": "9b209a99-520e-436f-be97-fe37ae505518", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1482163995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86abf24d608d4c438161dc0b8335dea1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6e31bab-04", "ovs_interfaceid": "a6e31bab-0459-42fe-8756-d37cc3fa3e88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1434.717866] env[62510]: DEBUG oslo_concurrency.lockutils [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.255s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1434.718615] env[62510]: DEBUG nova.compute.manager [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1434.727301] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5159202f-b352-4834-94eb-38e3a078dec5 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.385s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1434.728033] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5159202f-b352-4834-94eb-38e3a078dec5 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1434.735668] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.897s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1434.738501] env[62510]: INFO nova.compute.claims [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1434.746080] env[62510]: INFO nova.compute.manager [-] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Took 2.54 seconds to deallocate network for instance. [ 1434.767529] env[62510]: DEBUG oslo_vmware.api [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]528ce272-b4dc-2b9c-7582-8890d9b02ba0, 'name': SearchDatastore_Task, 'duration_secs': 0.013467} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1434.767529] env[62510]: DEBUG oslo_concurrency.lockutils [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1434.767529] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 7490c825-dfd5-409c-9fd6-0e78643338fb/7490c825-dfd5-409c-9fd6-0e78643338fb.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1434.767529] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ad783614-61d5-418f-9468-ac7048db4fb1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.774957] env[62510]: DEBUG oslo_vmware.api [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Waiting for the task: (returnval){ [ 1434.774957] env[62510]: value = "task-1768305" [ 1434.774957] env[62510]: _type = "Task" [ 1434.774957] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1434.784382] env[62510]: DEBUG oslo_vmware.api [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': task-1768305, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.786386] env[62510]: INFO nova.scheduler.client.report [None req-5159202f-b352-4834-94eb-38e3a078dec5 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Deleted allocations for instance 3266d254-4a75-4fd3-b4e7-ebeb86467cbe [ 1435.166505] env[62510]: DEBUG oslo_concurrency.lockutils [req-d2717ee3-cb7b-48e2-bdd6-8caccfdf6c68 req-b0dddc91-33e8-4547-9db4-45205a83ec77 service nova] Releasing lock "refresh_cache-4e735bb6-f167-4c2b-b44e-d2dd3040603d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1435.231814] env[62510]: DEBUG nova.compute.utils [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1435.234195] env[62510]: DEBUG nova.compute.manager [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1435.234682] env[62510]: DEBUG nova.network.neutron [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1435.262979] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f46c7689-32a8-4938-b334-4e3028a6e8d8 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1435.298471] env[62510]: DEBUG oslo_vmware.api [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': task-1768305, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.299194] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5159202f-b352-4834-94eb-38e3a078dec5 tempest-ImagesNegativeTestJSON-1334371676 tempest-ImagesNegativeTestJSON-1334371676-project-member] Lock "3266d254-4a75-4fd3-b4e7-ebeb86467cbe" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.823s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1435.309064] env[62510]: DEBUG nova.policy [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f1bf853221064c9c9c3b74ce10f19501', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '03842d36a3404265b14f6b70db40c1fe', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1435.320199] env[62510]: DEBUG nova.compute.manager [req-74f8483d-8427-4492-9df5-a829e9d49ed5 req-0e84a6c6-821a-43fa-b9ea-13bcc8f6bf28 service nova] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Received event network-vif-deleted-d87267d7-ec03-4d4a-a31a-9cb46a459d3c {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1435.735353] env[62510]: DEBUG nova.compute.manager [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Start building block device mappings for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1435.765426] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95f15ddf-520c-4a4b-b9a3-9dd04742b888 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.802704] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Updating instance '12768001-6ed0-47be-8f20-c59ee82b842a' progress to 0 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1435.824024] env[62510]: DEBUG oslo_vmware.api [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': task-1768305, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.72544} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.824024] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 7490c825-dfd5-409c-9fd6-0e78643338fb/7490c825-dfd5-409c-9fd6-0e78643338fb.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1435.824024] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1435.824024] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c0cbb475-e92e-45dc-9609-615eb093a6d5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.833342] env[62510]: DEBUG oslo_vmware.api [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Waiting for the task: (returnval){ [ 1435.833342] env[62510]: value = "task-1768306" [ 1435.833342] env[62510]: _type = "Task" [ 1435.833342] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.849824] env[62510]: DEBUG oslo_vmware.api [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': task-1768306, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.922165] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Acquiring lock "a09a34de-fe7c-414b-8a89-2e9271c72a5c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1435.922403] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Lock "a09a34de-fe7c-414b-8a89-2e9271c72a5c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1435.994509] env[62510]: DEBUG nova.network.neutron [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Successfully created port: af2efe8c-492c-4033-b300-295761787dee {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1436.319674] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1436.323879] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1e7c91b4-5d64-4fb1-b01c-0b04b2708621 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.333556] env[62510]: DEBUG oslo_vmware.api [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Waiting for the task: (returnval){ [ 1436.333556] env[62510]: value = "task-1768307" [ 1436.333556] env[62510]: _type = "Task" [ 1436.333556] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.353206] env[62510]: DEBUG oslo_vmware.api [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768307, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.357530] env[62510]: DEBUG oslo_vmware.api [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': task-1768306, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071526} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.357777] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1436.359891] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82c25f08-f495-4999-863b-3ac1b0d2a768 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.390583] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Reconfiguring VM instance instance-0000000f to attach disk [datastore1] 7490c825-dfd5-409c-9fd6-0e78643338fb/7490c825-dfd5-409c-9fd6-0e78643338fb.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1436.392410] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e8dc4a44-61e4-46aa-a3ff-4a9a2ccfcf95 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.412500] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5780603c-d9a6-4f71-b028-ad32dd10c647 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.422559] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87b24cd7-56a0-405b-bc09-febb530dfdf0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.426617] env[62510]: DEBUG oslo_vmware.api [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Waiting for the task: (returnval){ [ 1436.426617] env[62510]: value = "task-1768308" [ 1436.426617] env[62510]: _type = "Task" [ 1436.426617] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.461080] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c990e3b7-5ff4-4c7f-a299-1347b4f045f1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.467818] env[62510]: DEBUG oslo_vmware.api [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': task-1768308, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.474529] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c0f1a95-ba07-4bf7-9f11-20dcc0262aac {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.492409] env[62510]: DEBUG nova.compute.provider_tree [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1436.758040] env[62510]: DEBUG nova.compute.manager [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1436.792446] env[62510]: DEBUG nova.virt.hardware [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1436.792711] env[62510]: DEBUG nova.virt.hardware [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1436.792908] env[62510]: DEBUG nova.virt.hardware [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1436.793484] env[62510]: DEBUG nova.virt.hardware [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1436.794119] env[62510]: DEBUG nova.virt.hardware [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1436.794119] env[62510]: DEBUG nova.virt.hardware [None 
req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1436.796888] env[62510]: DEBUG nova.virt.hardware [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1436.797080] env[62510]: DEBUG nova.virt.hardware [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1436.797298] env[62510]: DEBUG nova.virt.hardware [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1436.797478] env[62510]: DEBUG nova.virt.hardware [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1436.797659] env[62510]: DEBUG nova.virt.hardware [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1436.798590] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-028f2898-f069-4f2c-b39b-db83d23472d9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.808123] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Acquiring lock "2d2ab209-8072-4e64-8170-50d96d71bc54" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1436.808648] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Lock "2d2ab209-8072-4e64-8170-50d96d71bc54" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1436.813040] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c844dfeb-eb68-4a3e-aa17-df889e2b691d {{(pid=62510) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.844812] env[62510]: DEBUG oslo_vmware.api [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768307, 'name': PowerOffVM_Task, 'duration_secs': 0.334056} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.845331] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1436.845331] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Updating instance '12768001-6ed0-47be-8f20-c59ee82b842a' progress to 17 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1436.943448] env[62510]: DEBUG oslo_vmware.api [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': task-1768308, 'name': ReconfigVM_Task, 'duration_secs': 0.285609} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.943724] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Reconfigured VM instance instance-0000000f to attach disk [datastore1] 7490c825-dfd5-409c-9fd6-0e78643338fb/7490c825-dfd5-409c-9fd6-0e78643338fb.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1436.944444] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fd0f46c4-6c10-46c9-955a-15904ce365ec {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.951510] env[62510]: DEBUG oslo_vmware.api [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Waiting for the task: (returnval){ [ 1436.951510] env[62510]: value = "task-1768309" [ 1436.951510] env[62510]: _type = "Task" [ 1436.951510] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.960088] env[62510]: DEBUG oslo_vmware.api [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': task-1768309, 'name': Rename_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.000020] env[62510]: DEBUG nova.scheduler.client.report [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1437.355784] env[62510]: DEBUG nova.virt.hardware [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:41Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1437.355784] env[62510]: DEBUG nova.virt.hardware [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1437.355784] env[62510]: DEBUG nova.virt.hardware [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1437.355784] env[62510]: DEBUG nova.virt.hardware [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1437.355784] env[62510]: DEBUG nova.virt.hardware [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1437.355784] env[62510]: DEBUG nova.virt.hardware [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1437.355784] env[62510]: DEBUG nova.virt.hardware [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1437.355784] env[62510]: DEBUG nova.virt.hardware [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1437.356694] env[62510]: DEBUG nova.virt.hardware [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1437.357012] env[62510]: DEBUG nova.virt.hardware [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1437.357312] env[62510]: DEBUG nova.virt.hardware [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1437.363824] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-26229065-2554-4f4f-b772-481c09bafe38 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.384720] env[62510]: DEBUG oslo_vmware.api [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Waiting for the task: (returnval){ [ 1437.384720] env[62510]: value = "task-1768310" [ 1437.384720] env[62510]: _type = "Task" [ 1437.384720] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1437.395326] env[62510]: DEBUG oslo_vmware.api [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768310, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.462553] env[62510]: DEBUG oslo_vmware.api [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': task-1768309, 'name': Rename_Task, 'duration_secs': 0.167385} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1437.462829] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1437.463084] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b0395f22-7d6f-4bb8-ac0b-c86e2059a7c8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.471861] env[62510]: DEBUG oslo_vmware.api [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Waiting for the task: (returnval){ [ 1437.471861] env[62510]: value = "task-1768311" [ 1437.471861] env[62510]: _type = "Task" [ 1437.471861] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1437.479160] env[62510]: DEBUG oslo_vmware.api [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': task-1768311, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.501220] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.766s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1437.501756] env[62510]: DEBUG nova.compute.manager [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1437.506102] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.918s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1437.511107] env[62510]: INFO nova.compute.claims [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1437.901249] env[62510]: DEBUG oslo_vmware.api [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768310, 'name': ReconfigVM_Task, 'duration_secs': 0.294882} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1437.901249] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Updating instance '12768001-6ed0-47be-8f20-c59ee82b842a' progress to 33 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1437.987470] env[62510]: DEBUG oslo_vmware.api [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': task-1768311, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.019809] env[62510]: DEBUG nova.compute.utils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1438.025385] env[62510]: DEBUG nova.compute.manager [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1438.025585] env[62510]: DEBUG nova.network.neutron [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1438.206633] env[62510]: DEBUG nova.policy [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '224336007aac40e693de18cd326630d2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '96bb2a761e354152ba4690456124f6b8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1438.257711] env[62510]: DEBUG oslo_concurrency.lockutils [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquiring lock "58e71d67-aed2-4329-ab60-4dfacff1d0a2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1438.258013] env[62510]: DEBUG oslo_concurrency.lockutils [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "58e71d67-aed2-4329-ab60-4dfacff1d0a2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1438.409662] env[62510]: DEBUG nova.virt.hardware [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1438.409662] env[62510]: DEBUG nova.virt.hardware [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1438.409662] env[62510]: DEBUG nova.virt.hardware [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1438.409662] env[62510]: DEBUG nova.virt.hardware [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1438.410767] env[62510]: DEBUG nova.virt.hardware [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1438.410767] env[62510]: DEBUG nova.virt.hardware [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1438.410767] env[62510]: DEBUG nova.virt.hardware [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1438.410767] env[62510]: DEBUG nova.virt.hardware [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1438.410767] env[62510]: DEBUG nova.virt.hardware [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:501}} [ 1438.410767] env[62510]: DEBUG nova.virt.hardware [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1438.410767] env[62510]: DEBUG nova.virt.hardware [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1438.416660] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Reconfiguring VM instance instance-00000003 to detach disk 2000 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1438.417054] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73aa5b30-c400-4934-a804-8112b9f7cecf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.437154] env[62510]: DEBUG oslo_vmware.api [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Waiting for the task: (returnval){ [ 1438.437154] env[62510]: value = "task-1768312" [ 1438.437154] env[62510]: _type = "Task" [ 1438.437154] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.447019] env[62510]: DEBUG oslo_vmware.api [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768312, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.485211] env[62510]: DEBUG oslo_vmware.api [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': task-1768311, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.498161] env[62510]: DEBUG nova.network.neutron [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Successfully updated port: af2efe8c-492c-4033-b300-295761787dee {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1438.527314] env[62510]: DEBUG nova.compute.manager [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Start building block device mappings for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1438.955022] env[62510]: DEBUG oslo_vmware.api [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768312, 'name': ReconfigVM_Task, 'duration_secs': 0.214762} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.955022] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Reconfigured VM instance instance-00000003 to detach disk 2000 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1438.955743] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd59d813-fd01-4c2b-93aa-46de27addbbb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.987753] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Reconfiguring VM instance instance-00000003 to attach disk [datastore1] 12768001-6ed0-47be-8f20-c59ee82b842a/12768001-6ed0-47be-8f20-c59ee82b842a.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1438.997103] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d38efa35-ff5d-44fe-a6df-dd05edc719bf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.014551] env[62510]: DEBUG oslo_concurrency.lockutils [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Acquiring lock "refresh_cache-8bbafd7f-cdd1-4246-a509-2f97a6f78497" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1439.014701] env[62510]: DEBUG oslo_concurrency.lockutils [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Acquired lock "refresh_cache-8bbafd7f-cdd1-4246-a509-2f97a6f78497" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1439.014856] env[62510]: DEBUG nova.network.neutron [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1439.022849] env[62510]: DEBUG oslo_vmware.api [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': task-1768311, 'name': PowerOnVM_Task, 'duration_secs': 1.192646} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.025107] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1439.025387] env[62510]: INFO nova.compute.manager [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Took 8.31 seconds to spawn the instance on the hypervisor. [ 1439.025615] env[62510]: DEBUG nova.compute.manager [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1439.026028] env[62510]: DEBUG oslo_vmware.api [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Waiting for the task: (returnval){ [ 1439.026028] env[62510]: value = "task-1768313" [ 1439.026028] env[62510]: _type = "Task" [ 1439.026028] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.026753] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4d38741-72ca-4eaa-9acb-e3435b1a95d5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.053094] env[62510]: DEBUG oslo_vmware.api [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768313, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.224590] env[62510]: DEBUG nova.compute.manager [req-ac577db3-bea2-41fe-aa6f-07983fc41f90 req-616681e7-33d9-47df-a4a6-fe22b2a364da service nova] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Received event network-vif-plugged-af2efe8c-492c-4033-b300-295761787dee {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1439.224818] env[62510]: DEBUG oslo_concurrency.lockutils [req-ac577db3-bea2-41fe-aa6f-07983fc41f90 req-616681e7-33d9-47df-a4a6-fe22b2a364da service nova] Acquiring lock "8bbafd7f-cdd1-4246-a509-2f97a6f78497-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1439.225127] env[62510]: DEBUG oslo_concurrency.lockutils [req-ac577db3-bea2-41fe-aa6f-07983fc41f90 req-616681e7-33d9-47df-a4a6-fe22b2a364da service nova] Lock "8bbafd7f-cdd1-4246-a509-2f97a6f78497-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1439.225294] env[62510]: DEBUG oslo_concurrency.lockutils [req-ac577db3-bea2-41fe-aa6f-07983fc41f90 req-616681e7-33d9-47df-a4a6-fe22b2a364da service nova] Lock "8bbafd7f-cdd1-4246-a509-2f97a6f78497-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1439.225494] env[62510]: DEBUG nova.compute.manager [req-ac577db3-bea2-41fe-aa6f-07983fc41f90 req-616681e7-33d9-47df-a4a6-fe22b2a364da service nova] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] No waiting events found dispatching network-vif-plugged-af2efe8c-492c-4033-b300-295761787dee {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1439.225671] env[62510]: WARNING nova.compute.manager [req-ac577db3-bea2-41fe-aa6f-07983fc41f90 req-616681e7-33d9-47df-a4a6-fe22b2a364da service nova] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Received unexpected event network-vif-plugged-af2efe8c-492c-4033-b300-295761787dee for instance with vm_state building and task_state spawning. 
[ 1439.228055] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7072a37-f1dc-446e-80d0-678c714bc0c5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.238026] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20504a76-0ed4-4066-a508-d3a8cd1a8cf6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.277102] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff0f3e69-47a8-4858-a6c6-7a4a16a197b8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.285181] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebb2c179-f116-40da-a3c0-b71c595589ec {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.302991] env[62510]: DEBUG nova.compute.provider_tree [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1439.540216] env[62510]: DEBUG oslo_vmware.api [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768313, 'name': ReconfigVM_Task, 'duration_secs': 0.468335} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.541560] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Reconfigured VM instance instance-00000003 to attach disk [datastore1] 12768001-6ed0-47be-8f20-c59ee82b842a/12768001-6ed0-47be-8f20-c59ee82b842a.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1439.541560] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Updating instance '12768001-6ed0-47be-8f20-c59ee82b842a' progress to 50 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1439.551955] env[62510]: DEBUG nova.compute.manager [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1439.567994] env[62510]: INFO nova.compute.manager [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Took 31.96 seconds to build instance. 
[ 1439.570217] env[62510]: DEBUG nova.network.neutron [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1439.594656] env[62510]: DEBUG nova.network.neutron [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Successfully created port: 9dffe699-6570-4729-8d57-b8ea05dff25d {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1439.599484] env[62510]: DEBUG nova.virt.hardware [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1439.599484] env[62510]: DEBUG nova.virt.hardware [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1439.599484] env[62510]: DEBUG nova.virt.hardware [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1439.599484] env[62510]: DEBUG nova.virt.hardware [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1439.599484] env[62510]: DEBUG nova.virt.hardware [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1439.599484] env[62510]: DEBUG nova.virt.hardware [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1439.600641] env[62510]: 
DEBUG nova.virt.hardware [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1439.600641] env[62510]: DEBUG nova.virt.hardware [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1439.600641] env[62510]: DEBUG nova.virt.hardware [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1439.600641] env[62510]: DEBUG nova.virt.hardware [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1439.600641] env[62510]: DEBUG nova.virt.hardware [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1439.602111] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99b8d37e-aaff-4795-984c-113b95b00bfc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.611637] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd3c64d9-94cb-44ca-b661-862f2b20c964 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.734995] env[62510]: DEBUG nova.network.neutron [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Updating instance_info_cache with network_info: [{"id": "af2efe8c-492c-4033-b300-295761787dee", "address": "fa:16:3e:34:7f:d9", "network": {"id": "4e82b2ba-c971-4518-a64c-536e98c8a34d", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1899115936-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03842d36a3404265b14f6b70db40c1fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16e15a36-a55b-4c27-b864-f284339009d0", "external-id": "nsx-vlan-transportzone-616", "segmentation_id": 616, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf2efe8c-49", "ovs_interfaceid": "af2efe8c-492c-4033-b300-295761787dee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1439.806763] env[62510]: DEBUG nova.scheduler.client.report [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1439.829960] env[62510]: DEBUG oslo_concurrency.lockutils [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Acquiring lock "3533a113-6f46-4b18-872d-9bc1b0481969" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1439.829960] env[62510]: DEBUG oslo_concurrency.lockutils [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Lock "3533a113-6f46-4b18-872d-9bc1b0481969" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1440.050594] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1944615-03f0-4b32-8ae6-17313f8fa5c3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.074272] env[62510]: DEBUG oslo_concurrency.lockutils [None req-aea82018-6033-4d25-9260-8976d20af496 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Lock "7490c825-dfd5-409c-9fd6-0e78643338fb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.651s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1440.080816] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb16aade-ebbe-416b-b03c-c390392ba7ea {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.099561] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Updating instance '12768001-6ed0-47be-8f20-c59ee82b842a' progress to 67 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1440.239316] 
env[62510]: DEBUG oslo_concurrency.lockutils [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Releasing lock "refresh_cache-8bbafd7f-cdd1-4246-a509-2f97a6f78497" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1440.240260] env[62510]: DEBUG nova.compute.manager [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Instance network_info: |[{"id": "af2efe8c-492c-4033-b300-295761787dee", "address": "fa:16:3e:34:7f:d9", "network": {"id": "4e82b2ba-c971-4518-a64c-536e98c8a34d", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1899115936-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03842d36a3404265b14f6b70db40c1fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16e15a36-a55b-4c27-b864-f284339009d0", "external-id": "nsx-vlan-transportzone-616", "segmentation_id": 616, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf2efe8c-49", "ovs_interfaceid": "af2efe8c-492c-4033-b300-295761787dee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1440.240791] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:34:7f:d9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '16e15a36-a55b-4c27-b864-f284339009d0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'af2efe8c-492c-4033-b300-295761787dee', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1440.253223] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Creating folder: Project (03842d36a3404265b14f6b70db40c1fe). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1440.253561] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ecde9419-0dbf-4c73-bd3e-f1befc508bce {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.265195] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Created folder: Project (03842d36a3404265b14f6b70db40c1fe) in parent group-v367197. 
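The instance network_info blob logged above (and repeated for the same port later in this section) is a JSON list of VIF dictionaries, and the "Instance VIF info" line that follows it is essentially a projection of a few of its fields. A short, purely illustrative helper, assuming the blob has been captured as JSON text; the function name and the chosen fields are arbitrary for this sketch:

import json

def summarize_vifs(network_info_json):
    """Illustrative only: pull out the fields the VIF-info summary refers to."""
    summary = []
    for vif in json.loads(network_info_json):
        details = vif.get('details', {})
        fixed_ips = [ip['address']
                     for subnet in vif['network']['subnets']
                     for ip in subnet['ips']]
        summary.append({
            'port_id': vif['id'],
            'mac_address': vif['address'],
            'ovs_interfaceid': vif.get('ovs_interfaceid'),
            'nsx_switch_id': details.get('nsx-logical-switch-id'),
            'devname': vif.get('devname'),
            'fixed_ips': fixed_ips,
        })
    return summary

For the port above this would yield mac_address fa:16:3e:34:7f:d9, ovs_interfaceid af2efe8c-492c-4033-b300-295761787dee, nsx_switch_id 16e15a36-a55b-4c27-b864-f284339009d0 and fixed IP 192.168.128.6, matching the VIF info summary logged for instance 8bbafd7f-cdd1-4246-a509-2f97a6f78497.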
[ 1440.265549] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Creating folder: Instances. Parent ref: group-v367240. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1440.265798] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-308f69b8-d310-4098-9a7e-fdb796cc8718 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.274885] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Created folder: Instances in parent group-v367240. [ 1440.275177] env[62510]: DEBUG oslo.service.loopingcall [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1440.275383] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1440.275601] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-db1c3d43-aa12-4440-a19c-e8fd16be4dce {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.295753] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1440.295753] env[62510]: value = "task-1768316" [ 1440.295753] env[62510]: _type = "Task" [ 1440.295753] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.306671] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768316, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.312683] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.807s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1440.313219] env[62510]: DEBUG nova.compute.manager [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1440.318291] env[62510]: DEBUG oslo_concurrency.lockutils [None req-42699163-39ad-444c-adae-b2c783caa4e6 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.748s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1440.318291] env[62510]: DEBUG nova.objects.instance [None req-42699163-39ad-444c-adae-b2c783caa4e6 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Lazy-loading 'resources' on Instance uuid 585784c5-b56a-435d-8b22-53bc5cb39b25 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1440.581510] env[62510]: DEBUG nova.compute.manager [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1440.663538] env[62510]: DEBUG nova.network.neutron [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Port 7489ebb6-ec5f-4097-9a62-81a2d3dedd52 binding to destination host cpu-1 is already ACTIVE {{(pid=62510) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1440.812599] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768316, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.818103] env[62510]: DEBUG nova.compute.utils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1440.819667] env[62510]: DEBUG nova.compute.manager [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1440.819855] env[62510]: DEBUG nova.network.neutron [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1440.890073] env[62510]: DEBUG nova.policy [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '224336007aac40e693de18cd326630d2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '96bb2a761e354152ba4690456124f6b8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1441.131992] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1441.310389] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768316, 'name': CreateVM_Task, 'duration_secs': 0.521197} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.310602] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1441.311375] env[62510]: DEBUG oslo_concurrency.lockutils [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1441.311558] env[62510]: DEBUG oslo_concurrency.lockutils [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1441.311960] env[62510]: DEBUG oslo_concurrency.lockutils [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1441.312217] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be855d47-011e-41fa-a26d-bf7a68dbf6ed {{(pid=62510) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.318956] env[62510]: DEBUG oslo_vmware.api [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Waiting for the task: (returnval){ [ 1441.318956] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5285931a-b482-3cc7-7cc8-f361bfc83f9f" [ 1441.318956] env[62510]: _type = "Task" [ 1441.318956] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.328474] env[62510]: DEBUG nova.compute.manager [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1441.331715] env[62510]: DEBUG oslo_vmware.api [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5285931a-b482-3cc7-7cc8-f361bfc83f9f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.416535] env[62510]: INFO nova.compute.manager [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Rebuilding instance [ 1441.493101] env[62510]: DEBUG nova.compute.manager [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1441.493704] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-562e27d0-249b-4691-90a3-44a0377d3963 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.525396] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86c3c806-b9ab-4b8a-ae9b-fc4d25f74a61 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.533791] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0ba2c58-68de-4d7b-afb2-2543430be750 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.572129] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9e065da-4b05-4578-802a-4a7e838dbb55 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.581364] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5112911-0562-4f4c-8591-8d1f1432f7ad {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.598333] env[62510]: DEBUG nova.compute.provider_tree [None req-42699163-39ad-444c-adae-b2c783caa4e6 
tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1441.700103] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquiring lock "12768001-6ed0-47be-8f20-c59ee82b842a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1441.700314] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Lock "12768001-6ed0-47be-8f20-c59ee82b842a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1441.700998] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Lock "12768001-6ed0-47be-8f20-c59ee82b842a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1441.776930] env[62510]: DEBUG nova.network.neutron [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Successfully created port: 909eb33f-63c2-4175-9250-a6557ad136f0 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1441.831671] env[62510]: DEBUG oslo_vmware.api [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5285931a-b482-3cc7-7cc8-f361bfc83f9f, 'name': SearchDatastore_Task, 'duration_secs': 0.026089} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.831671] env[62510]: DEBUG oslo_concurrency.lockutils [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1441.831671] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1441.831671] env[62510]: DEBUG oslo_concurrency.lockutils [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1441.831671] env[62510]: DEBUG oslo_concurrency.lockutils [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1441.831671] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1441.832423] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9ab58556-ab9a-4f0d-a252-3dc07d63d74d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.846115] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1441.846442] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1441.847139] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3f69e0e-67f5-4661-97e4-e4746ac3fbac {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.852773] env[62510]: DEBUG oslo_vmware.api [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Waiting for the task: (returnval){ [ 1441.852773] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]521203ba-65c6-4fd3-15e4-13cb9d15a546" [ 1441.852773] env[62510]: _type = "Task" [ 1441.852773] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.858906] env[62510]: DEBUG nova.compute.manager [req-dfbe9180-df70-420d-b707-67135a08d4cf req-290159be-4e7b-4ee1-b9cf-8f64f8591c1c service nova] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Received event network-changed-af2efe8c-492c-4033-b300-295761787dee {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1441.858906] env[62510]: DEBUG nova.compute.manager [req-dfbe9180-df70-420d-b707-67135a08d4cf req-290159be-4e7b-4ee1-b9cf-8f64f8591c1c service nova] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Refreshing instance network info cache due to event network-changed-af2efe8c-492c-4033-b300-295761787dee. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1441.859994] env[62510]: DEBUG oslo_concurrency.lockutils [req-dfbe9180-df70-420d-b707-67135a08d4cf req-290159be-4e7b-4ee1-b9cf-8f64f8591c1c service nova] Acquiring lock "refresh_cache-8bbafd7f-cdd1-4246-a509-2f97a6f78497" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1441.859994] env[62510]: DEBUG oslo_concurrency.lockutils [req-dfbe9180-df70-420d-b707-67135a08d4cf req-290159be-4e7b-4ee1-b9cf-8f64f8591c1c service nova] Acquired lock "refresh_cache-8bbafd7f-cdd1-4246-a509-2f97a6f78497" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1441.859994] env[62510]: DEBUG nova.network.neutron [req-dfbe9180-df70-420d-b707-67135a08d4cf req-290159be-4e7b-4ee1-b9cf-8f64f8591c1c service nova] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Refreshing network info cache for port af2efe8c-492c-4033-b300-295761787dee {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1441.872489] env[62510]: DEBUG oslo_vmware.api [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]521203ba-65c6-4fd3-15e4-13cb9d15a546, 'name': SearchDatastore_Task, 'duration_secs': 0.010547} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.875223] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c659f7c-413d-404a-8fd8-bd6ac5bac8df {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.883313] env[62510]: DEBUG oslo_vmware.api [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Waiting for the task: (returnval){ [ 1441.883313] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52454e42-7b9e-c052-512f-8fa020d97f15" [ 1441.883313] env[62510]: _type = "Task" [ 1441.883313] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.892868] env[62510]: DEBUG oslo_vmware.api [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52454e42-7b9e-c052-512f-8fa020d97f15, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.103215] env[62510]: DEBUG nova.scheduler.client.report [None req-42699163-39ad-444c-adae-b2c783caa4e6 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1442.316808] env[62510]: DEBUG nova.compute.manager [req-6d133704-7fe2-4d67-aac1-eb44557685a5 req-031c8d8f-b4cd-4898-859e-46ef5e18b3a6 service nova] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Received event network-vif-plugged-9dffe699-6570-4729-8d57-b8ea05dff25d {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1442.317048] env[62510]: DEBUG oslo_concurrency.lockutils [req-6d133704-7fe2-4d67-aac1-eb44557685a5 req-031c8d8f-b4cd-4898-859e-46ef5e18b3a6 service nova] Acquiring lock "c58184e7-bf4f-406b-a778-9b8f60740fe6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1442.317260] env[62510]: DEBUG oslo_concurrency.lockutils [req-6d133704-7fe2-4d67-aac1-eb44557685a5 req-031c8d8f-b4cd-4898-859e-46ef5e18b3a6 service nova] Lock "c58184e7-bf4f-406b-a778-9b8f60740fe6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1442.317420] env[62510]: DEBUG oslo_concurrency.lockutils [req-6d133704-7fe2-4d67-aac1-eb44557685a5 req-031c8d8f-b4cd-4898-859e-46ef5e18b3a6 service nova] Lock "c58184e7-bf4f-406b-a778-9b8f60740fe6-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1442.317608] env[62510]: DEBUG nova.compute.manager [req-6d133704-7fe2-4d67-aac1-eb44557685a5 req-031c8d8f-b4cd-4898-859e-46ef5e18b3a6 service nova] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] No waiting events found dispatching network-vif-plugged-9dffe699-6570-4729-8d57-b8ea05dff25d {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1442.317818] env[62510]: WARNING nova.compute.manager [req-6d133704-7fe2-4d67-aac1-eb44557685a5 req-031c8d8f-b4cd-4898-859e-46ef5e18b3a6 service nova] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Received unexpected event network-vif-plugged-9dffe699-6570-4729-8d57-b8ea05dff25d for instance with vm_state building and task_state spawning. [ 1442.339791] env[62510]: DEBUG nova.compute.manager [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1442.375856] env[62510]: DEBUG nova.virt.hardware [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1442.378106] env[62510]: DEBUG nova.virt.hardware [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1442.378106] env[62510]: DEBUG nova.virt.hardware [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1442.378106] env[62510]: DEBUG nova.virt.hardware [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1442.378106] env[62510]: DEBUG nova.virt.hardware [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] 
Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1442.378106] env[62510]: DEBUG nova.virt.hardware [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1442.378106] env[62510]: DEBUG nova.virt.hardware [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1442.378106] env[62510]: DEBUG nova.virt.hardware [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1442.378106] env[62510]: DEBUG nova.virt.hardware [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1442.378106] env[62510]: DEBUG nova.virt.hardware [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1442.378106] env[62510]: DEBUG nova.virt.hardware [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1442.379405] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb34ac25-e768-4b71-8c36-65cc230d3085 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.390455] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50a53801-8cea-4134-a455-c0151eed84c5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.400840] env[62510]: DEBUG oslo_vmware.api [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52454e42-7b9e-c052-512f-8fa020d97f15, 'name': SearchDatastore_Task, 'duration_secs': 0.008909} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.411641] env[62510]: DEBUG oslo_concurrency.lockutils [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1442.413558] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 8bbafd7f-cdd1-4246-a509-2f97a6f78497/8bbafd7f-cdd1-4246-a509-2f97a6f78497.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1442.413558] env[62510]: DEBUG nova.network.neutron [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Successfully updated port: 9dffe699-6570-4729-8d57-b8ea05dff25d {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1442.414623] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ca8cfd45-e29d-48a2-9bae-036a984ca868 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.423251] env[62510]: DEBUG oslo_vmware.api [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Waiting for the task: (returnval){ [ 1442.423251] env[62510]: value = "task-1768317" [ 1442.423251] env[62510]: _type = "Task" [ 1442.423251] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.434106] env[62510]: DEBUG oslo_vmware.api [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Task: {'id': task-1768317, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.521555] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1442.521555] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5a5ac3e2-8d7b-470f-8d9b-e898dfcac3f7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.528795] env[62510]: DEBUG oslo_vmware.api [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Waiting for the task: (returnval){ [ 1442.528795] env[62510]: value = "task-1768318" [ 1442.528795] env[62510]: _type = "Task" [ 1442.528795] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.537590] env[62510]: DEBUG oslo_vmware.api [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': task-1768318, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.614046] env[62510]: DEBUG oslo_concurrency.lockutils [None req-42699163-39ad-444c-adae-b2c783caa4e6 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.297s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1442.621652] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.039s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1442.623569] env[62510]: INFO nova.compute.claims [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1442.641402] env[62510]: DEBUG oslo_concurrency.lockutils [None req-80405ff8-cb7b-42f3-bae0-9748ec0bd253 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Acquiring lock "c7d875ee-2b9c-48e4-9bf9-f7602e75ec62" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1442.641402] env[62510]: DEBUG oslo_concurrency.lockutils [None req-80405ff8-cb7b-42f3-bae0-9748ec0bd253 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Lock "c7d875ee-2b9c-48e4-9bf9-f7602e75ec62" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62510) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1442.641402] env[62510]: DEBUG nova.compute.manager [None req-80405ff8-cb7b-42f3-bae0-9748ec0bd253 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1442.641402] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8afe213-73d6-4c72-991d-69921d84afad {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.649064] env[62510]: DEBUG nova.compute.manager [None req-80405ff8-cb7b-42f3-bae0-9748ec0bd253 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62510) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1442.651536] env[62510]: DEBUG nova.objects.instance [None req-80405ff8-cb7b-42f3-bae0-9748ec0bd253 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Lazy-loading 'flavor' on Instance uuid c7d875ee-2b9c-48e4-9bf9-f7602e75ec62 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1442.670723] env[62510]: INFO nova.scheduler.client.report [None req-42699163-39ad-444c-adae-b2c783caa4e6 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Deleted allocations for instance 585784c5-b56a-435d-8b22-53bc5cb39b25 [ 1442.793781] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquiring lock "refresh_cache-12768001-6ed0-47be-8f20-c59ee82b842a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1442.793973] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquired lock "refresh_cache-12768001-6ed0-47be-8f20-c59ee82b842a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1442.794173] env[62510]: DEBUG nova.network.neutron [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1442.917974] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Acquiring lock "refresh_cache-c58184e7-bf4f-406b-a778-9b8f60740fe6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1442.918234] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Acquired lock 
"refresh_cache-c58184e7-bf4f-406b-a778-9b8f60740fe6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1442.918394] env[62510]: DEBUG nova.network.neutron [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1442.930924] env[62510]: DEBUG nova.network.neutron [req-dfbe9180-df70-420d-b707-67135a08d4cf req-290159be-4e7b-4ee1-b9cf-8f64f8591c1c service nova] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Updated VIF entry in instance network info cache for port af2efe8c-492c-4033-b300-295761787dee. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1442.931411] env[62510]: DEBUG nova.network.neutron [req-dfbe9180-df70-420d-b707-67135a08d4cf req-290159be-4e7b-4ee1-b9cf-8f64f8591c1c service nova] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Updating instance_info_cache with network_info: [{"id": "af2efe8c-492c-4033-b300-295761787dee", "address": "fa:16:3e:34:7f:d9", "network": {"id": "4e82b2ba-c971-4518-a64c-536e98c8a34d", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1899115936-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03842d36a3404265b14f6b70db40c1fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16e15a36-a55b-4c27-b864-f284339009d0", "external-id": "nsx-vlan-transportzone-616", "segmentation_id": 616, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf2efe8c-49", "ovs_interfaceid": "af2efe8c-492c-4033-b300-295761787dee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1442.941238] env[62510]: DEBUG oslo_vmware.api [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Task: {'id': task-1768317, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.045091] env[62510]: DEBUG oslo_vmware.api [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': task-1768318, 'name': PowerOffVM_Task, 'duration_secs': 0.159808} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.045550] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1443.045717] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1443.047424] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d973a66-534a-4e6b-b50f-9b38703e65c8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.061008] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1443.061878] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2da87c44-87ae-4999-a324-c1d5ee36d115 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.089541] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1443.089812] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1443.090177] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Deleting the datastore file [datastore1] 7490c825-dfd5-409c-9fd6-0e78643338fb {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1443.090778] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ee45a004-6596-46d6-8b96-ef1ca9b54ed1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.097921] env[62510]: DEBUG oslo_vmware.api [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Waiting for the task: (returnval){ [ 1443.097921] env[62510]: value = "task-1768320" [ 1443.097921] env[62510]: _type = "Task" [ 1443.097921] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.109178] env[62510]: DEBUG oslo_vmware.api [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': task-1768320, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.187150] env[62510]: DEBUG oslo_concurrency.lockutils [None req-42699163-39ad-444c-adae-b2c783caa4e6 tempest-ServerDiagnosticsTest-697947385 tempest-ServerDiagnosticsTest-697947385-project-member] Lock "585784c5-b56a-435d-8b22-53bc5cb39b25" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.783s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1443.439139] env[62510]: DEBUG oslo_vmware.api [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Task: {'id': task-1768317, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.698568} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.439139] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 8bbafd7f-cdd1-4246-a509-2f97a6f78497/8bbafd7f-cdd1-4246-a509-2f97a6f78497.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1443.439139] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1443.439139] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4f536200-bb47-40ca-bac8-e6cc820cd419 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.441277] env[62510]: DEBUG oslo_concurrency.lockutils [req-dfbe9180-df70-420d-b707-67135a08d4cf req-290159be-4e7b-4ee1-b9cf-8f64f8591c1c service nova] Releasing lock "refresh_cache-8bbafd7f-cdd1-4246-a509-2f97a6f78497" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1443.445643] env[62510]: DEBUG oslo_vmware.api [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Waiting for the task: (returnval){ [ 1443.445643] env[62510]: value = "task-1768321" [ 1443.445643] env[62510]: _type = "Task" [ 1443.445643] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.457793] env[62510]: DEBUG oslo_vmware.api [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Task: {'id': task-1768321, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.503162] env[62510]: DEBUG nova.network.neutron [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1443.611633] env[62510]: DEBUG oslo_vmware.api [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': task-1768320, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142916} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.611847] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1443.612096] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1443.612208] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1443.663440] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-80405ff8-cb7b-42f3-bae0-9748ec0bd253 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1443.664363] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-44061ce6-4a16-4887-aa63-e2985dcf915e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.682880] env[62510]: DEBUG oslo_vmware.api [None req-80405ff8-cb7b-42f3-bae0-9748ec0bd253 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Waiting for the task: (returnval){ [ 1443.682880] env[62510]: value = "task-1768322" [ 1443.682880] env[62510]: _type = "Task" [ 1443.682880] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.692470] env[62510]: DEBUG oslo_vmware.api [None req-80405ff8-cb7b-42f3-bae0-9748ec0bd253 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768322, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.831445] env[62510]: DEBUG nova.network.neutron [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Updating instance_info_cache with network_info: [{"id": "7489ebb6-ec5f-4097-9a62-81a2d3dedd52", "address": "fa:16:3e:81:65:65", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.149", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7489ebb6-ec", "ovs_interfaceid": "7489ebb6-ec5f-4097-9a62-81a2d3dedd52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1443.876923] env[62510]: DEBUG nova.network.neutron [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Updating instance_info_cache with network_info: [{"id": "9dffe699-6570-4729-8d57-b8ea05dff25d", "address": "fa:16:3e:ac:f3:39", "network": {"id": "434ea666-edd6-4865-9204-8bb2cb6b5b91", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-946168541-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "96bb2a761e354152ba4690456124f6b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9dffe699-65", "ovs_interfaceid": "9dffe699-6570-4729-8d57-b8ea05dff25d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1443.956222] env[62510]: DEBUG oslo_vmware.api [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Task: {'id': task-1768321, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080211} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.956712] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1443.958082] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5fc3c69-8ea2-41f9-828b-39df10f3897b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.994168] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Reconfiguring VM instance instance-00000010 to attach disk [datastore1] 8bbafd7f-cdd1-4246-a509-2f97a6f78497/8bbafd7f-cdd1-4246-a509-2f97a6f78497.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1443.994295] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-295a38ae-84ac-4046-93df-2cab5594415b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.018248] env[62510]: DEBUG oslo_vmware.api [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Waiting for the task: (returnval){ [ 1444.018248] env[62510]: value = "task-1768323" [ 1444.018248] env[62510]: _type = "Task" [ 1444.018248] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.026579] env[62510]: DEBUG oslo_vmware.api [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Task: {'id': task-1768323, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.185522] env[62510]: DEBUG nova.network.neutron [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Successfully updated port: 909eb33f-63c2-4175-9250-a6557ad136f0 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1444.209613] env[62510]: DEBUG oslo_vmware.api [None req-80405ff8-cb7b-42f3-bae0-9748ec0bd253 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768322, 'name': PowerOffVM_Task, 'duration_secs': 0.189285} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.209613] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-80405ff8-cb7b-42f3-bae0-9748ec0bd253 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1444.209613] env[62510]: DEBUG nova.compute.manager [None req-80405ff8-cb7b-42f3-bae0-9748ec0bd253 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1444.209613] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e08fcd33-269e-40a6-b796-1db75fff4610 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.307312] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25a034d8-ec17-4a0e-9a0c-414584320beb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.315677] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee49f454-9a43-412f-9225-c85b0eb51eaf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.348233] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Releasing lock "refresh_cache-12768001-6ed0-47be-8f20-c59ee82b842a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1444.352930] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a669077-7ea7-417e-bc67-156ab0eca786 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.363831] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-986a0926-2306-4d69-a8bd-a7927a6588f7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.380027] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Releasing lock "refresh_cache-c58184e7-bf4f-406b-a778-9b8f60740fe6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1444.380155] env[62510]: DEBUG nova.compute.manager [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Instance network_info: |[{"id": "9dffe699-6570-4729-8d57-b8ea05dff25d", "address": "fa:16:3e:ac:f3:39", "network": {"id": "434ea666-edd6-4865-9204-8bb2cb6b5b91", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-946168541-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": 
"gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "96bb2a761e354152ba4690456124f6b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9dffe699-65", "ovs_interfaceid": "9dffe699-6570-4729-8d57-b8ea05dff25d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1444.380617] env[62510]: DEBUG nova.compute.provider_tree [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1444.382098] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ac:f3:39', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '54495d8d-2696-4f65-b925-e567abdc205f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9dffe699-6570-4729-8d57-b8ea05dff25d', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1444.389796] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Creating folder: Project (96bb2a761e354152ba4690456124f6b8). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1444.391033] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4e427cd5-4e20-456d-acb9-93eab3fb4137 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.403089] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Created folder: Project (96bb2a761e354152ba4690456124f6b8) in parent group-v367197. [ 1444.403315] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Creating folder: Instances. Parent ref: group-v367243. 
{{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1444.403553] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6d32aac1-57e8-4a97-bafb-7fa4777483b4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.415741] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Created folder: Instances in parent group-v367243. [ 1444.415741] env[62510]: DEBUG oslo.service.loopingcall [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1444.415741] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1444.415741] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3fd8502e-9287-4f68-9f4d-1f0270f588bc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.436322] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1444.436322] env[62510]: value = "task-1768326" [ 1444.436322] env[62510]: _type = "Task" [ 1444.436322] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.444452] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768326, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.531377] env[62510]: DEBUG oslo_vmware.api [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Task: {'id': task-1768323, 'name': ReconfigVM_Task, 'duration_secs': 0.324639} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.531377] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Reconfigured VM instance instance-00000010 to attach disk [datastore1] 8bbafd7f-cdd1-4246-a509-2f97a6f78497/8bbafd7f-cdd1-4246-a509-2f97a6f78497.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1444.531937] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2c968187-b6be-4563-bb74-0378fa3b91d5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.538790] env[62510]: DEBUG oslo_vmware.api [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Waiting for the task: (returnval){ [ 1444.538790] env[62510]: value = "task-1768327" [ 1444.538790] env[62510]: _type = "Task" [ 1444.538790] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.547837] env[62510]: DEBUG oslo_vmware.api [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Task: {'id': task-1768327, 'name': Rename_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.590790] env[62510]: DEBUG nova.compute.manager [req-3c4f8393-7163-46e5-9527-fcdeacc82652 req-2ecd3a97-a6e0-4bf0-a6bb-2ff89b27d5c6 service nova] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Received event network-vif-plugged-909eb33f-63c2-4175-9250-a6557ad136f0 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1444.591039] env[62510]: DEBUG oslo_concurrency.lockutils [req-3c4f8393-7163-46e5-9527-fcdeacc82652 req-2ecd3a97-a6e0-4bf0-a6bb-2ff89b27d5c6 service nova] Acquiring lock "26b283b0-98b4-4a15-abe0-fbf97e1f49eb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1444.591269] env[62510]: DEBUG oslo_concurrency.lockutils [req-3c4f8393-7163-46e5-9527-fcdeacc82652 req-2ecd3a97-a6e0-4bf0-a6bb-2ff89b27d5c6 service nova] Lock "26b283b0-98b4-4a15-abe0-fbf97e1f49eb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1444.591458] env[62510]: DEBUG oslo_concurrency.lockutils [req-3c4f8393-7163-46e5-9527-fcdeacc82652 req-2ecd3a97-a6e0-4bf0-a6bb-2ff89b27d5c6 service nova] Lock "26b283b0-98b4-4a15-abe0-fbf97e1f49eb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1444.591630] env[62510]: DEBUG nova.compute.manager [req-3c4f8393-7163-46e5-9527-fcdeacc82652 req-2ecd3a97-a6e0-4bf0-a6bb-2ff89b27d5c6 service nova] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] No waiting events found dispatching 
network-vif-plugged-909eb33f-63c2-4175-9250-a6557ad136f0 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1444.591826] env[62510]: WARNING nova.compute.manager [req-3c4f8393-7163-46e5-9527-fcdeacc82652 req-2ecd3a97-a6e0-4bf0-a6bb-2ff89b27d5c6 service nova] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Received unexpected event network-vif-plugged-909eb33f-63c2-4175-9250-a6557ad136f0 for instance with vm_state building and task_state spawning. [ 1444.659671] env[62510]: DEBUG nova.virt.hardware [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1444.659949] env[62510]: DEBUG nova.virt.hardware [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1444.661039] env[62510]: DEBUG nova.virt.hardware [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1444.661039] env[62510]: DEBUG nova.virt.hardware [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1444.661039] env[62510]: DEBUG nova.virt.hardware [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1444.661302] env[62510]: DEBUG nova.virt.hardware [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1444.661509] env[62510]: DEBUG nova.virt.hardware [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1444.661742] 
env[62510]: DEBUG nova.virt.hardware [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1444.662535] env[62510]: DEBUG nova.virt.hardware [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1444.662535] env[62510]: DEBUG nova.virt.hardware [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1444.662711] env[62510]: DEBUG nova.virt.hardware [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1444.664252] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ea432da-4b31-4b1c-bd3d-e9d68a17992c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.672836] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8eb60fe-475b-40c3-84fc-e6d6354093f7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.689173] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Instance VIF info [] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1444.696651] env[62510]: DEBUG oslo.service.loopingcall [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1444.697644] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Acquiring lock "refresh_cache-26b283b0-98b4-4a15-abe0-fbf97e1f49eb" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1444.697808] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Acquired lock "refresh_cache-26b283b0-98b4-4a15-abe0-fbf97e1f49eb" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1444.697972] env[62510]: DEBUG nova.network.neutron [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1444.701085] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1444.701085] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7b9b1b96-adc6-4485-ae21-9c03d411d113 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.722381] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1444.722381] env[62510]: value = "task-1768328" [ 1444.722381] env[62510]: _type = "Task" [ 1444.722381] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.728882] env[62510]: DEBUG oslo_concurrency.lockutils [None req-80405ff8-cb7b-42f3-bae0-9748ec0bd253 tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Lock "c7d875ee-2b9c-48e4-9bf9-f7602e75ec62" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.091s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1444.733176] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768328, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.754057] env[62510]: DEBUG nova.network.neutron [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Instance cache missing network info. 
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1444.765785] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquiring lock "83fa0d32-18ee-401d-af0b-a0adb538e5f4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1444.765785] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lock "83fa0d32-18ee-401d-af0b-a0adb538e5f4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1444.880869] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c315507-a5ab-4898-9329-582396ebd759 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.908492] env[62510]: DEBUG nova.scheduler.client.report [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1444.913274] env[62510]: DEBUG nova.network.neutron [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Updating instance_info_cache with network_info: [{"id": "909eb33f-63c2-4175-9250-a6557ad136f0", "address": "fa:16:3e:11:49:f1", "network": {"id": "434ea666-edd6-4865-9204-8bb2cb6b5b91", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-946168541-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "96bb2a761e354152ba4690456124f6b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap909eb33f-63", "ovs_interfaceid": "909eb33f-63c2-4175-9250-a6557ad136f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1444.914897] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c97e0832-6478-4671-870a-7b482023e30c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.925909] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Updating instance '12768001-6ed0-47be-8f20-c59ee82b842a' progress to 83 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1444.948211] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768326, 'name': CreateVM_Task, 'duration_secs': 0.380621} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.948519] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1444.949567] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1444.949803] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1444.950308] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1444.950682] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f80fc430-0157-4fee-a9fc-6047798a2b39 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.956036] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Waiting for the task: (returnval){ [ 1444.956036] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52208cd0-c6a3-ff5d-725d-69a389f5e6b2" [ 1444.956036] env[62510]: _type = "Task" [ 1444.956036] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.964915] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52208cd0-c6a3-ff5d-725d-69a389f5e6b2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.038075] env[62510]: DEBUG nova.compute.manager [req-be44cac8-e45a-4f28-bab7-d89bdf709c0a req-5bcb35de-5481-432c-8524-feeef69c68ad service nova] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Received event network-changed-9dffe699-6570-4729-8d57-b8ea05dff25d {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1445.038365] env[62510]: DEBUG nova.compute.manager [req-be44cac8-e45a-4f28-bab7-d89bdf709c0a req-5bcb35de-5481-432c-8524-feeef69c68ad service nova] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Refreshing instance network info cache due to event network-changed-9dffe699-6570-4729-8d57-b8ea05dff25d. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1445.038445] env[62510]: DEBUG oslo_concurrency.lockutils [req-be44cac8-e45a-4f28-bab7-d89bdf709c0a req-5bcb35de-5481-432c-8524-feeef69c68ad service nova] Acquiring lock "refresh_cache-c58184e7-bf4f-406b-a778-9b8f60740fe6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1445.038719] env[62510]: DEBUG oslo_concurrency.lockutils [req-be44cac8-e45a-4f28-bab7-d89bdf709c0a req-5bcb35de-5481-432c-8524-feeef69c68ad service nova] Acquired lock "refresh_cache-c58184e7-bf4f-406b-a778-9b8f60740fe6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1445.039032] env[62510]: DEBUG nova.network.neutron [req-be44cac8-e45a-4f28-bab7-d89bdf709c0a req-5bcb35de-5481-432c-8524-feeef69c68ad service nova] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Refreshing network info cache for port 9dffe699-6570-4729-8d57-b8ea05dff25d {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1445.058440] env[62510]: DEBUG oslo_vmware.api [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Task: {'id': task-1768327, 'name': Rename_Task, 'duration_secs': 0.142485} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.058829] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1445.059510] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-43c8d9eb-1f1f-455f-96d7-02cd34d03a27 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.068868] env[62510]: DEBUG oslo_vmware.api [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Waiting for the task: (returnval){ [ 1445.068868] env[62510]: value = "task-1768329" [ 1445.068868] env[62510]: _type = "Task" [ 1445.068868] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.083297] env[62510]: DEBUG oslo_vmware.api [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Task: {'id': task-1768329, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.233563] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768328, 'name': CreateVM_Task, 'duration_secs': 0.319369} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.234651] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1445.234651] env[62510]: DEBUG oslo_concurrency.lockutils [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1445.416618] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.795s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1445.417162] env[62510]: DEBUG nova.compute.manager [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1445.420488] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.577s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1445.421967] env[62510]: INFO nova.compute.claims [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1445.425803] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Releasing lock "refresh_cache-26b283b0-98b4-4a15-abe0-fbf97e1f49eb" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1445.426090] env[62510]: DEBUG nova.compute.manager [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Instance network_info: |[{"id": "909eb33f-63c2-4175-9250-a6557ad136f0", "address": "fa:16:3e:11:49:f1", "network": {"id": "434ea666-edd6-4865-9204-8bb2cb6b5b91", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-946168541-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "96bb2a761e354152ba4690456124f6b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap909eb33f-63", "ovs_interfaceid": "909eb33f-63c2-4175-9250-a6557ad136f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1445.426872] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:11:49:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '54495d8d-2696-4f65-b925-e567abdc205f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '909eb33f-63c2-4175-9250-a6557ad136f0', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1445.434899] env[62510]: DEBUG oslo.service.loopingcall [None 
req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1445.436598] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1445.436833] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1445.438026] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ad181fb0-2735-4813-b8d6-8c5013639453 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.438780] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b4764a40-9b79-48dd-b99c-e7bccbb005f4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.464276] env[62510]: DEBUG oslo_vmware.api [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Waiting for the task: (returnval){ [ 1445.464276] env[62510]: value = "task-1768330" [ 1445.464276] env[62510]: _type = "Task" [ 1445.464276] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.464734] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1445.464734] env[62510]: value = "task-1768331" [ 1445.464734] env[62510]: _type = "Task" [ 1445.464734] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.498588] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52208cd0-c6a3-ff5d-725d-69a389f5e6b2, 'name': SearchDatastore_Task, 'duration_secs': 0.016759} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.498588] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1445.498588] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1445.498588] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1445.498588] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1445.498588] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1445.498588] env[62510]: DEBUG oslo_concurrency.lockutils [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1445.498588] env[62510]: DEBUG oslo_concurrency.lockutils [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1445.498588] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4a6bda2c-6f54-4cf2-8c6d-3ca4990a1b76 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.498588] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ca62c70-f0f0-44c4-97ce-f735e3a91a47 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1445.498588] env[62510]: DEBUG oslo_vmware.api [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768330, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.498588] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768331, 'name': CreateVM_Task} progress is 10%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.498588] env[62510]: DEBUG oslo_vmware.api [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Waiting for the task: (returnval){ [ 1445.498588] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52c95557-fc84-365a-f8ce-b9ac8772a92c" [ 1445.498588] env[62510]: _type = "Task" [ 1445.498588] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.501074] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1445.501299] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1445.502449] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e35f1544-8661-443a-80ab-8b4578da12e7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.517856] env[62510]: DEBUG oslo_vmware.api [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52c95557-fc84-365a-f8ce-b9ac8772a92c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.517856] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Waiting for the task: (returnval){ [ 1445.517856] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52a98507-ffae-7194-4763-880a7d3412c6" [ 1445.517856] env[62510]: _type = "Task" [ 1445.517856] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.526513] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52a98507-ffae-7194-4763-880a7d3412c6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.580517] env[62510]: DEBUG oslo_vmware.api [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Task: {'id': task-1768329, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.643927] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Acquiring lock "2c5c38c1-511f-4aae-969a-eb6de128fae7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1445.644257] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Lock "2c5c38c1-511f-4aae-969a-eb6de128fae7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1445.931367] env[62510]: DEBUG nova.compute.utils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1445.937021] env[62510]: DEBUG nova.network.neutron [req-be44cac8-e45a-4f28-bab7-d89bdf709c0a req-5bcb35de-5481-432c-8524-feeef69c68ad service nova] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Updated VIF entry in instance network info cache for port 9dffe699-6570-4729-8d57-b8ea05dff25d. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1445.937021] env[62510]: DEBUG nova.network.neutron [req-be44cac8-e45a-4f28-bab7-d89bdf709c0a req-5bcb35de-5481-432c-8524-feeef69c68ad service nova] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Updating instance_info_cache with network_info: [{"id": "9dffe699-6570-4729-8d57-b8ea05dff25d", "address": "fa:16:3e:ac:f3:39", "network": {"id": "434ea666-edd6-4865-9204-8bb2cb6b5b91", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-946168541-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "96bb2a761e354152ba4690456124f6b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9dffe699-65", "ovs_interfaceid": "9dffe699-6570-4729-8d57-b8ea05dff25d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1445.937021] env[62510]: DEBUG nova.compute.manager [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1445.937021] env[62510]: DEBUG nova.network.neutron [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1445.984743] env[62510]: DEBUG oslo_vmware.api [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768330, 'name': PowerOnVM_Task, 'duration_secs': 0.429042} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.988351] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1445.989726] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a8e92aac-c847-467c-970d-c3bde9b95c4e tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Updating instance '12768001-6ed0-47be-8f20-c59ee82b842a' progress to 100 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1445.992685] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768331, 'name': CreateVM_Task, 'duration_secs': 0.390154} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.992760] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1445.993713] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1446.003780] env[62510]: DEBUG nova.policy [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '224336007aac40e693de18cd326630d2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '96bb2a761e354152ba4690456124f6b8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1446.012400] env[62510]: DEBUG oslo_vmware.api [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52c95557-fc84-365a-f8ce-b9ac8772a92c, 'name': SearchDatastore_Task, 'duration_secs': 0.010642} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.012845] env[62510]: DEBUG oslo_concurrency.lockutils [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1446.012966] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1446.013140] env[62510]: DEBUG oslo_concurrency.lockutils [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1446.013369] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1446.013695] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1446.013944] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-152ed8c7-0241-4253-9ec3-431b388b962c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.023106] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Waiting for the task: (returnval){ [ 1446.023106] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]522ad8e2-cb0c-1020-9607-a5bfbd88c97e" [ 1446.023106] env[62510]: _type = "Task" [ 1446.023106] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.027115] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52a98507-ffae-7194-4763-880a7d3412c6, 'name': SearchDatastore_Task, 'duration_secs': 0.011279} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.032138] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da45fa25-1725-4347-be5e-a1a45bfa4f4e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.039784] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]522ad8e2-cb0c-1020-9607-a5bfbd88c97e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.041337] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Waiting for the task: (returnval){ [ 1446.041337] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5278f804-a60d-e3d3-e1c1-3bba0b77dea9" [ 1446.041337] env[62510]: _type = "Task" [ 1446.041337] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.050462] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5278f804-a60d-e3d3-e1c1-3bba0b77dea9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.080529] env[62510]: DEBUG oslo_vmware.api [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Task: {'id': task-1768329, 'name': PowerOnVM_Task, 'duration_secs': 0.620782} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.080822] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1446.081066] env[62510]: INFO nova.compute.manager [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Took 9.32 seconds to spawn the instance on the hypervisor. 
[ 1446.081308] env[62510]: DEBUG nova.compute.manager [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1446.082152] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bd6c7a5-99bd-461d-a9d0-4b02397f5fcc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.404381] env[62510]: DEBUG nova.network.neutron [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Successfully created port: 7477b8b7-c766-4c58-a1dc-9db9f24198b6 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1446.438027] env[62510]: DEBUG oslo_concurrency.lockutils [req-be44cac8-e45a-4f28-bab7-d89bdf709c0a req-5bcb35de-5481-432c-8524-feeef69c68ad service nova] Releasing lock "refresh_cache-c58184e7-bf4f-406b-a778-9b8f60740fe6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1446.438851] env[62510]: DEBUG nova.compute.manager [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1446.540892] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]522ad8e2-cb0c-1020-9607-a5bfbd88c97e, 'name': SearchDatastore_Task, 'duration_secs': 0.025573} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.541225] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1446.541463] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1446.541672] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1446.553423] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5278f804-a60d-e3d3-e1c1-3bba0b77dea9, 'name': SearchDatastore_Task, 'duration_secs': 0.028517} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.554436] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1446.554436] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] c58184e7-bf4f-406b-a778-9b8f60740fe6/c58184e7-bf4f-406b-a778-9b8f60740fe6.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1446.554436] env[62510]: DEBUG oslo_concurrency.lockutils [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1446.554436] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1446.554783] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ce93e6f1-46e4-444c-b4e6-706c28178a3c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.556748] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-598dd127-e43b-4fbf-8b90-9fc436fbf859 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.565714] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Waiting for the task: (returnval){ [ 1446.565714] env[62510]: value = "task-1768332" [ 1446.565714] env[62510]: _type = "Task" [ 1446.565714] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.569898] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1446.570127] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1446.571409] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-087ddf95-e093-4b8b-86d9-0c461b49f41a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.583168] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768332, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.584408] env[62510]: DEBUG oslo_vmware.api [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Waiting for the task: (returnval){ [ 1446.584408] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52b49f65-437f-de9d-45bb-6235adbecd43" [ 1446.584408] env[62510]: _type = "Task" [ 1446.584408] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.591230] env[62510]: DEBUG oslo_vmware.api [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52b49f65-437f-de9d-45bb-6235adbecd43, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.601929] env[62510]: INFO nova.compute.manager [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Took 33.82 seconds to build instance. [ 1446.662347] env[62510]: DEBUG nova.objects.instance [None req-f9d43b2f-bac7-4cc5-a92a-b4b0b139124e tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Lazy-loading 'flavor' on Instance uuid c7d875ee-2b9c-48e4-9bf9-f7602e75ec62 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1447.073768] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ce45497-acc2-49d1-b212-4b01067446db {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.079800] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768332, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.450317} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.081394] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] c58184e7-bf4f-406b-a778-9b8f60740fe6/c58184e7-bf4f-406b-a778-9b8f60740fe6.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1447.081634] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1447.082886] env[62510]: DEBUG nova.compute.manager [req-345569a2-37da-4bc6-9d19-14a7149451f2 req-6911e7a0-aa83-4657-8c0c-cbda8a492349 service nova] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Received event network-changed-909eb33f-63c2-4175-9250-a6557ad136f0 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1447.083029] env[62510]: DEBUG nova.compute.manager [req-345569a2-37da-4bc6-9d19-14a7149451f2 req-6911e7a0-aa83-4657-8c0c-cbda8a492349 service nova] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Refreshing instance network info cache due to event network-changed-909eb33f-63c2-4175-9250-a6557ad136f0. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1447.083248] env[62510]: DEBUG oslo_concurrency.lockutils [req-345569a2-37da-4bc6-9d19-14a7149451f2 req-6911e7a0-aa83-4657-8c0c-cbda8a492349 service nova] Acquiring lock "refresh_cache-26b283b0-98b4-4a15-abe0-fbf97e1f49eb" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1447.083387] env[62510]: DEBUG oslo_concurrency.lockutils [req-345569a2-37da-4bc6-9d19-14a7149451f2 req-6911e7a0-aa83-4657-8c0c-cbda8a492349 service nova] Acquired lock "refresh_cache-26b283b0-98b4-4a15-abe0-fbf97e1f49eb" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1447.083544] env[62510]: DEBUG nova.network.neutron [req-345569a2-37da-4bc6-9d19-14a7149451f2 req-6911e7a0-aa83-4657-8c0c-cbda8a492349 service nova] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Refreshing network info cache for port 909eb33f-63c2-4175-9250-a6557ad136f0 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1447.087060] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9a7d2e63-3fdc-4b3f-8c7d-c950c99cec41 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.093181] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b39ec231-b434-4781-aaa0-c5e36ad1264d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.105339] env[62510]: DEBUG oslo_concurrency.lockutils [None req-91aa1932-db2b-41d5-8c84-4f2ad73e77c6 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Lock "8bbafd7f-cdd1-4246-a509-2f97a6f78497" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.831s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1447.105601] env[62510]: DEBUG oslo_vmware.api [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52b49f65-437f-de9d-45bb-6235adbecd43, 'name': SearchDatastore_Task, 'duration_secs': 0.008342} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.137202] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Waiting for the task: (returnval){ [ 1447.137202] env[62510]: value = "task-1768333" [ 1447.137202] env[62510]: _type = "Task" [ 1447.137202] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.137202] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10964568-15b0-4366-a240-11c780116855 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.138286] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-767b8bf9-2042-4e99-ab4a-0c23c11df921 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.148662] env[62510]: DEBUG oslo_vmware.api [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Waiting for the task: (returnval){ [ 1447.148662] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]521c7d96-57a5-69eb-6e49-d737425865de" [ 1447.148662] env[62510]: _type = "Task" [ 1447.148662] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.154352] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768333, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.158579] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95a083bc-b0a4-46b8-ba08-43469f4b2f95 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.170421] env[62510]: DEBUG oslo_vmware.api [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]521c7d96-57a5-69eb-6e49-d737425865de, 'name': SearchDatastore_Task, 'duration_secs': 0.009751} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.178697] env[62510]: DEBUG oslo_concurrency.lockutils [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1447.178982] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 7490c825-dfd5-409c-9fd6-0e78643338fb/7490c825-dfd5-409c-9fd6-0e78643338fb.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1447.179826] env[62510]: DEBUG nova.compute.provider_tree [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1447.181370] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f9d43b2f-bac7-4cc5-a92a-b4b0b139124e tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Acquiring lock "refresh_cache-c7d875ee-2b9c-48e4-9bf9-f7602e75ec62" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1447.181525] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f9d43b2f-bac7-4cc5-a92a-b4b0b139124e tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Acquired lock "refresh_cache-c7d875ee-2b9c-48e4-9bf9-f7602e75ec62" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1447.181685] env[62510]: DEBUG nova.network.neutron [None req-f9d43b2f-bac7-4cc5-a92a-b4b0b139124e tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1447.181865] env[62510]: DEBUG nova.objects.instance [None req-f9d43b2f-bac7-4cc5-a92a-b4b0b139124e tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Lazy-loading 'info_cache' on Instance uuid c7d875ee-2b9c-48e4-9bf9-f7602e75ec62 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1447.183220] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1447.183307] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-155a7dcb-be93-4e50-88a4-f8c27724584b 
tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1447.183530] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-233abd59-d97c-45b2-a7bf-fa638a913949 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.187742] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-78a2ff98-63bf-40d7-bdd0-a0e2b1cb53e6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.194258] env[62510]: DEBUG oslo_vmware.api [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Waiting for the task: (returnval){ [ 1447.194258] env[62510]: value = "task-1768334" [ 1447.194258] env[62510]: _type = "Task" [ 1447.194258] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.195733] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1447.196845] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1447.199763] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f3b4273-9a64-42ee-bb65-a462be23360c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.207075] env[62510]: DEBUG oslo_vmware.api [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': task-1768334, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.208331] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Waiting for the task: (returnval){ [ 1447.208331] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52007c91-e671-a59a-31d2-16e42539f655" [ 1447.208331] env[62510]: _type = "Task" [ 1447.208331] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.215834] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52007c91-e671-a59a-31d2-16e42539f655, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.455173] env[62510]: DEBUG nova.compute.manager [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1447.491972] env[62510]: DEBUG nova.virt.hardware [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1447.492283] env[62510]: DEBUG nova.virt.hardware [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1447.492443] env[62510]: DEBUG nova.virt.hardware [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1447.492627] env[62510]: DEBUG nova.virt.hardware [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1447.492766] env[62510]: DEBUG nova.virt.hardware [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1447.492924] env[62510]: DEBUG nova.virt.hardware [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1447.493125] env[62510]: DEBUG nova.virt.hardware [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1447.493285] env[62510]: DEBUG nova.virt.hardware [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1447.493446] env[62510]: DEBUG nova.virt.hardware [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1447.493605] env[62510]: DEBUG nova.virt.hardware [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1447.493774] env[62510]: DEBUG nova.virt.hardware [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1447.494686] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8bc2a72-b4d7-469b-b943-df7c67963a11 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.506492] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2228c9ee-71ff-4305-80d9-3aa2c2009351 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.636370] env[62510]: DEBUG nova.compute.manager [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1447.656448] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768333, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070009} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.656700] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1447.658891] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ab87904-fbd8-49ff-a61c-69dd01c161aa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.689550] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] c58184e7-bf4f-406b-a778-9b8f60740fe6/c58184e7-bf4f-406b-a778-9b8f60740fe6.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1447.693621] env[62510]: DEBUG nova.objects.base [None req-f9d43b2f-bac7-4cc5-a92a-b4b0b139124e tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=62510) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1447.695050] env[62510]: DEBUG nova.scheduler.client.report [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1447.697984] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cdedbc8f-2c14-4353-9e61-ef771802e431 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.716305] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.295s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1447.716305] env[62510]: DEBUG nova.compute.manager [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1447.719492] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.551s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1447.721926] env[62510]: INFO nova.compute.claims [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1447.745334] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Waiting for the task: (returnval){ [ 1447.745334] env[62510]: value = "task-1768335" [ 1447.745334] env[62510]: _type = "Task" [ 1447.745334] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.745599] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52007c91-e671-a59a-31d2-16e42539f655, 'name': SearchDatastore_Task, 'duration_secs': 0.009805} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.745803] env[62510]: DEBUG oslo_vmware.api [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': task-1768334, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.45444} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.746689] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 7490c825-dfd5-409c-9fd6-0e78643338fb/7490c825-dfd5-409c-9fd6-0e78643338fb.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1447.746886] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1447.750781] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c981d679-69de-4092-ba1b-b694d4ba800f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.753741] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d4ba37fa-daa3-4bc8-a17d-14fe31751d89 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.767025] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Waiting for the task: (returnval){ [ 1447.767025] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]520bbdb1-3878-615c-b05a-c36ca7b06712" [ 1447.767025] env[62510]: _type = "Task" [ 1447.767025] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.767025] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768335, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.767025] env[62510]: DEBUG oslo_vmware.api [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Waiting for the task: (returnval){ [ 1447.767025] env[62510]: value = "task-1768336" [ 1447.767025] env[62510]: _type = "Task" [ 1447.767025] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.783020] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]520bbdb1-3878-615c-b05a-c36ca7b06712, 'name': SearchDatastore_Task, 'duration_secs': 0.009738} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.783548] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1447.784103] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 26b283b0-98b4-4a15-abe0-fbf97e1f49eb/26b283b0-98b4-4a15-abe0-fbf97e1f49eb.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1447.785252] env[62510]: DEBUG oslo_vmware.api [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': task-1768336, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.785480] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-78efd4f7-2ac0-4fc5-aec6-ef0eb08c2698 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.794133] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Waiting for the task: (returnval){ [ 1447.794133] env[62510]: value = "task-1768337" [ 1447.794133] env[62510]: _type = "Task" [ 1447.794133] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.803304] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768337, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.163037] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1448.229403] env[62510]: DEBUG nova.compute.utils [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1448.234468] env[62510]: DEBUG nova.compute.manager [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1448.234664] env[62510]: DEBUG nova.network.neutron [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1448.258013] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768335, 'name': ReconfigVM_Task, 'duration_secs': 0.327683} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.258263] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Reconfigured VM instance instance-00000011 to attach disk [datastore1] c58184e7-bf4f-406b-a778-9b8f60740fe6/c58184e7-bf4f-406b-a778-9b8f60740fe6.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1448.258954] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-26657083-f48a-4548-82ef-687037e65603 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.268229] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Waiting for the task: (returnval){ [ 1448.268229] env[62510]: value = "task-1768338" [ 1448.268229] env[62510]: _type = "Task" [ 1448.268229] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.287425] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768338, 'name': Rename_Task} progress is 10%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.293071] env[62510]: DEBUG oslo_vmware.api [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': task-1768336, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080697} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.293391] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1448.294229] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-807f49e7-d862-496c-87b7-0eafe96e0491 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.325190] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Reconfiguring VM instance instance-0000000f to attach disk [datastore1] 7490c825-dfd5-409c-9fd6-0e78643338fb/7490c825-dfd5-409c-9fd6-0e78643338fb.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1448.330095] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d1e34b56-65fd-412c-962e-2f367f3dd784 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.345890] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768337, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.47696} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.346230] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 26b283b0-98b4-4a15-abe0-fbf97e1f49eb/26b283b0-98b4-4a15-abe0-fbf97e1f49eb.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1448.346460] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1448.347144] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-25c13eec-8b58-46cc-bf5d-b641a67f6c82 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.351826] env[62510]: DEBUG oslo_vmware.api [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Waiting for the task: (returnval){ [ 1448.351826] env[62510]: value = "task-1768339" [ 1448.351826] env[62510]: _type = "Task" [ 1448.351826] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.356453] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Waiting for the task: (returnval){ [ 1448.356453] env[62510]: value = "task-1768340" [ 1448.356453] env[62510]: _type = "Task" [ 1448.356453] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.363044] env[62510]: DEBUG oslo_vmware.api [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': task-1768339, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.368009] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768340, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.371548] env[62510]: DEBUG nova.network.neutron [req-345569a2-37da-4bc6-9d19-14a7149451f2 req-6911e7a0-aa83-4657-8c0c-cbda8a492349 service nova] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Updated VIF entry in instance network info cache for port 909eb33f-63c2-4175-9250-a6557ad136f0. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1448.371927] env[62510]: DEBUG nova.network.neutron [req-345569a2-37da-4bc6-9d19-14a7149451f2 req-6911e7a0-aa83-4657-8c0c-cbda8a492349 service nova] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Updating instance_info_cache with network_info: [{"id": "909eb33f-63c2-4175-9250-a6557ad136f0", "address": "fa:16:3e:11:49:f1", "network": {"id": "434ea666-edd6-4865-9204-8bb2cb6b5b91", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-946168541-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "96bb2a761e354152ba4690456124f6b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap909eb33f-63", "ovs_interfaceid": "909eb33f-63c2-4175-9250-a6557ad136f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1448.384923] env[62510]: DEBUG nova.policy [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '525928ba40cb4cebb1e1d9e25adeae8d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '98d6c12eccf74757b3cbc2c8acddeb19', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1448.737672] env[62510]: DEBUG nova.compute.manager [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1448.788487] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768338, 'name': Rename_Task, 'duration_secs': 0.187874} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.789193] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1448.789948] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7b6b89d1-862b-4528-a5e1-2017cd07e2ba {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.809023] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Waiting for the task: (returnval){ [ 1448.809023] env[62510]: value = "task-1768341" [ 1448.809023] env[62510]: _type = "Task" [ 1448.809023] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.816486] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768341, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.865814] env[62510]: DEBUG oslo_vmware.api [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': task-1768339, 'name': ReconfigVM_Task, 'duration_secs': 0.276514} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.868798] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Reconfigured VM instance instance-0000000f to attach disk [datastore1] 7490c825-dfd5-409c-9fd6-0e78643338fb/7490c825-dfd5-409c-9fd6-0e78643338fb.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1448.872639] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-71fdfdf3-e02c-4d34-a1fe-bfcc71e494f9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.874977] env[62510]: DEBUG oslo_concurrency.lockutils [req-345569a2-37da-4bc6-9d19-14a7149451f2 req-6911e7a0-aa83-4657-8c0c-cbda8a492349 service nova] Releasing lock "refresh_cache-26b283b0-98b4-4a15-abe0-fbf97e1f49eb" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1448.875557] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768340, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.1375} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.876089] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1448.876922] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dee22daf-1764-4ea3-94d2-171669b95c34 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.880874] env[62510]: DEBUG oslo_vmware.api [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Waiting for the task: (returnval){ [ 1448.880874] env[62510]: value = "task-1768342" [ 1448.880874] env[62510]: _type = "Task" [ 1448.880874] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.905858] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Reconfiguring VM instance instance-00000012 to attach disk [datastore1] 26b283b0-98b4-4a15-abe0-fbf97e1f49eb/26b283b0-98b4-4a15-abe0-fbf97e1f49eb.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1448.909849] env[62510]: DEBUG nova.network.neutron [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Successfully updated port: 7477b8b7-c766-4c58-a1dc-9db9f24198b6 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1448.912032] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8e82bc88-5f6c-4dfd-96fa-78c11dcee266 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.932663] env[62510]: DEBUG nova.network.neutron [None req-f9d43b2f-bac7-4cc5-a92a-b4b0b139124e tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Updating instance_info_cache with network_info: [{"id": "7a0f3d5f-4630-470a-9084-d1e05b4f306e", "address": "fa:16:3e:85:5f:26", "network": {"id": "eb00b6bc-924a-4070-bdac-d34d8e726329", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1974390299-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c852b1f21b054fd0b6961685dcf528f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19671de9-8b5b-4710-adc3-7419f3c0f171", 
"external-id": "nsx-vlan-transportzone-421", "segmentation_id": 421, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a0f3d5f-46", "ovs_interfaceid": "7a0f3d5f-4630-470a-9084-d1e05b4f306e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1448.934731] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Acquiring lock "refresh_cache-9a1a0428-8ccd-4614-8853-ef3eeec23d55" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1448.934776] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Acquired lock "refresh_cache-9a1a0428-8ccd-4614-8853-ef3eeec23d55" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1448.934976] env[62510]: DEBUG nova.network.neutron [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1448.937953] env[62510]: DEBUG oslo_vmware.api [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': task-1768342, 'name': Rename_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.943086] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Waiting for the task: (returnval){ [ 1448.943086] env[62510]: value = "task-1768343" [ 1448.943086] env[62510]: _type = "Task" [ 1448.943086] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.958384] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768343, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.273375] env[62510]: DEBUG oslo_concurrency.lockutils [None req-77c2addd-0259-4f24-8f69-727d7125e8f5 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquiring lock "12768001-6ed0-47be-8f20-c59ee82b842a" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1449.273375] env[62510]: DEBUG oslo_concurrency.lockutils [None req-77c2addd-0259-4f24-8f69-727d7125e8f5 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Lock "12768001-6ed0-47be-8f20-c59ee82b842a" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1449.273375] env[62510]: DEBUG nova.compute.manager [None req-77c2addd-0259-4f24-8f69-727d7125e8f5 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Going to confirm migration 1 {{(pid=62510) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5157}} [ 1449.322411] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768341, 'name': PowerOnVM_Task, 'duration_secs': 0.491937} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.324067] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1449.324067] env[62510]: INFO nova.compute.manager [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Took 9.77 seconds to spawn the instance on the hypervisor. [ 1449.324067] env[62510]: DEBUG nova.compute.manager [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1449.324324] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f943a2c-4110-44a9-a6a2-246091482aa4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.392410] env[62510]: DEBUG oslo_vmware.api [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': task-1768342, 'name': Rename_Task, 'duration_secs': 0.150213} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.392687] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1449.392934] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-677d86de-0754-4154-ae5a-bbdf47309396 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.407945] env[62510]: DEBUG oslo_vmware.api [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Waiting for the task: (returnval){ [ 1449.407945] env[62510]: value = "task-1768344" [ 1449.407945] env[62510]: _type = "Task" [ 1449.407945] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.417576] env[62510]: DEBUG oslo_vmware.api [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': task-1768344, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.435397] env[62510]: DEBUG nova.network.neutron [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Successfully created port: 1e000a52-1d14-40cd-a33b-51dd1c0196b5 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1449.437652] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f9d43b2f-bac7-4cc5-a92a-b4b0b139124e tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Releasing lock "refresh_cache-c7d875ee-2b9c-48e4-9bf9-f7602e75ec62" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1449.457451] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768343, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.504168] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7d083a2-9187-4d80-8d9b-def478fef8c8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.512361] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6ed28f2-6068-4e63-8e86-7f0bf1abfa94 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.548165] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f884ffe3-4f98-4a80-802e-b2dd41e37386 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.556569] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9bcc159-5744-4323-be28-d950d03913a1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.573326] env[62510]: DEBUG nova.compute.provider_tree [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1449.619922] env[62510]: DEBUG nova.compute.manager [req-e447076f-36a6-44cc-8a32-55932d06b25a req-f60b1475-c261-4aeb-9baf-2fc243184ea0 service nova] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Received event network-vif-plugged-7477b8b7-c766-4c58-a1dc-9db9f24198b6 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1449.620152] env[62510]: DEBUG oslo_concurrency.lockutils [req-e447076f-36a6-44cc-8a32-55932d06b25a req-f60b1475-c261-4aeb-9baf-2fc243184ea0 service nova] Acquiring lock "9a1a0428-8ccd-4614-8853-ef3eeec23d55-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1449.620356] env[62510]: DEBUG oslo_concurrency.lockutils [req-e447076f-36a6-44cc-8a32-55932d06b25a req-f60b1475-c261-4aeb-9baf-2fc243184ea0 service nova] Lock "9a1a0428-8ccd-4614-8853-ef3eeec23d55-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1449.620524] env[62510]: DEBUG oslo_concurrency.lockutils [req-e447076f-36a6-44cc-8a32-55932d06b25a req-f60b1475-c261-4aeb-9baf-2fc243184ea0 service nova] Lock "9a1a0428-8ccd-4614-8853-ef3eeec23d55-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1449.620688] env[62510]: DEBUG 
nova.compute.manager [req-e447076f-36a6-44cc-8a32-55932d06b25a req-f60b1475-c261-4aeb-9baf-2fc243184ea0 service nova] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] No waiting events found dispatching network-vif-plugged-7477b8b7-c766-4c58-a1dc-9db9f24198b6 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1449.620858] env[62510]: WARNING nova.compute.manager [req-e447076f-36a6-44cc-8a32-55932d06b25a req-f60b1475-c261-4aeb-9baf-2fc243184ea0 service nova] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Received unexpected event network-vif-plugged-7477b8b7-c766-4c58-a1dc-9db9f24198b6 for instance with vm_state building and task_state spawning. [ 1449.621083] env[62510]: DEBUG nova.compute.manager [req-e447076f-36a6-44cc-8a32-55932d06b25a req-f60b1475-c261-4aeb-9baf-2fc243184ea0 service nova] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Received event network-changed-7477b8b7-c766-4c58-a1dc-9db9f24198b6 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1449.621174] env[62510]: DEBUG nova.compute.manager [req-e447076f-36a6-44cc-8a32-55932d06b25a req-f60b1475-c261-4aeb-9baf-2fc243184ea0 service nova] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Refreshing instance network info cache due to event network-changed-7477b8b7-c766-4c58-a1dc-9db9f24198b6. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1449.621340] env[62510]: DEBUG oslo_concurrency.lockutils [req-e447076f-36a6-44cc-8a32-55932d06b25a req-f60b1475-c261-4aeb-9baf-2fc243184ea0 service nova] Acquiring lock "refresh_cache-9a1a0428-8ccd-4614-8853-ef3eeec23d55" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1449.641719] env[62510]: DEBUG nova.compute.manager [req-32fdb1cf-0b1e-4e59-b8ba-1f3037551abb req-171008d3-d3fa-485f-b39b-7e6da46e3c18 service nova] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Received event network-changed-af2efe8c-492c-4033-b300-295761787dee {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1449.641936] env[62510]: DEBUG nova.compute.manager [req-32fdb1cf-0b1e-4e59-b8ba-1f3037551abb req-171008d3-d3fa-485f-b39b-7e6da46e3c18 service nova] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Refreshing instance network info cache due to event network-changed-af2efe8c-492c-4033-b300-295761787dee. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1449.642159] env[62510]: DEBUG oslo_concurrency.lockutils [req-32fdb1cf-0b1e-4e59-b8ba-1f3037551abb req-171008d3-d3fa-485f-b39b-7e6da46e3c18 service nova] Acquiring lock "refresh_cache-8bbafd7f-cdd1-4246-a509-2f97a6f78497" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1449.642303] env[62510]: DEBUG oslo_concurrency.lockutils [req-32fdb1cf-0b1e-4e59-b8ba-1f3037551abb req-171008d3-d3fa-485f-b39b-7e6da46e3c18 service nova] Acquired lock "refresh_cache-8bbafd7f-cdd1-4246-a509-2f97a6f78497" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1449.642514] env[62510]: DEBUG nova.network.neutron [req-32fdb1cf-0b1e-4e59-b8ba-1f3037551abb req-171008d3-d3fa-485f-b39b-7e6da46e3c18 service nova] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Refreshing network info cache for port af2efe8c-492c-4033-b300-295761787dee {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1449.713362] env[62510]: DEBUG nova.network.neutron [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1449.753970] env[62510]: DEBUG nova.compute.manager [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1449.793560] env[62510]: DEBUG nova.virt.hardware [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:34:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1292282467',id=23,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-1240407307',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1449.793560] env[62510]: DEBUG nova.virt.hardware [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1449.793560] env[62510]: DEBUG nova.virt.hardware [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1449.793560] env[62510]: DEBUG nova.virt.hardware [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1449.793560] env[62510]: DEBUG nova.virt.hardware [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1449.793560] env[62510]: DEBUG nova.virt.hardware [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1449.793560] env[62510]: DEBUG nova.virt.hardware [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1449.793560] env[62510]: DEBUG nova.virt.hardware [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 
tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1449.793560] env[62510]: DEBUG nova.virt.hardware [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1449.793560] env[62510]: DEBUG nova.virt.hardware [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1449.793560] env[62510]: DEBUG nova.virt.hardware [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1449.794341] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-727ba0b3-0824-4587-b840-02ccfc07e132 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.804061] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6115855-e392-4099-875e-c46892f42287 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.846180] env[62510]: INFO nova.compute.manager [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Took 34.03 seconds to build instance. [ 1449.920442] env[62510]: DEBUG oslo_vmware.api [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': task-1768344, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.955262] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768343, 'name': ReconfigVM_Task, 'duration_secs': 0.92555} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.955262] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Reconfigured VM instance instance-00000012 to attach disk [datastore1] 26b283b0-98b4-4a15-abe0-fbf97e1f49eb/26b283b0-98b4-4a15-abe0-fbf97e1f49eb.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1449.956252] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1bf178fe-8d2b-46b8-af0c-86da28e4f1e1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.964251] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Waiting for the task: (returnval){ [ 1449.964251] env[62510]: value = "task-1768345" [ 1449.964251] env[62510]: _type = "Task" [ 1449.964251] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.974687] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768345, 'name': Rename_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.003651] env[62510]: DEBUG oslo_concurrency.lockutils [None req-77c2addd-0259-4f24-8f69-727d7125e8f5 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquiring lock "refresh_cache-12768001-6ed0-47be-8f20-c59ee82b842a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1450.003780] env[62510]: DEBUG oslo_concurrency.lockutils [None req-77c2addd-0259-4f24-8f69-727d7125e8f5 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquired lock "refresh_cache-12768001-6ed0-47be-8f20-c59ee82b842a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1450.004064] env[62510]: DEBUG nova.network.neutron [None req-77c2addd-0259-4f24-8f69-727d7125e8f5 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1450.004297] env[62510]: DEBUG nova.objects.instance [None req-77c2addd-0259-4f24-8f69-727d7125e8f5 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Lazy-loading 'info_cache' on Instance uuid 12768001-6ed0-47be-8f20-c59ee82b842a {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1450.101235] env[62510]: ERROR nova.scheduler.client.report [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [req-3175deb7-82d2-4972-a1e1-a91473e3510a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c3653102-341b-4ed1-8b1f-1abaf8aa3e56. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-3175deb7-82d2-4972-a1e1-a91473e3510a"}]} [ 1450.106038] env[62510]: DEBUG nova.network.neutron [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Updating instance_info_cache with network_info: [{"id": "7477b8b7-c766-4c58-a1dc-9db9f24198b6", "address": "fa:16:3e:02:33:4d", "network": {"id": "434ea666-edd6-4865-9204-8bb2cb6b5b91", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-946168541-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "96bb2a761e354152ba4690456124f6b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7477b8b7-c7", "ovs_interfaceid": "7477b8b7-c766-4c58-a1dc-9db9f24198b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1450.122819] env[62510]: DEBUG nova.scheduler.client.report [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Refreshing inventories for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1450.145073] env[62510]: DEBUG nova.scheduler.client.report [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Updating ProviderTree inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1450.145073] env[62510]: DEBUG nova.compute.provider_tree [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 
tempest-ServersAdminTestJSON-1135699744-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1450.161611] env[62510]: DEBUG nova.scheduler.client.report [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Refreshing aggregate associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, aggregates: None {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1450.189996] env[62510]: DEBUG nova.scheduler.client.report [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Refreshing trait associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1450.350504] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Lock "c58184e7-bf4f-406b-a778-9b8f60740fe6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.468s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1450.421609] env[62510]: DEBUG oslo_vmware.api [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': task-1768344, 'name': PowerOnVM_Task, 'duration_secs': 0.782375} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.421876] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1450.422091] env[62510]: DEBUG nova.compute.manager [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1450.422869] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8499949e-39d7-4e09-870a-96ffd05abd16 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.445904] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9d43b2f-bac7-4cc5-a92a-b4b0b139124e tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1450.446334] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c1ec6acc-4f60-496d-9731-8b695fb32c01 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.460247] env[62510]: DEBUG oslo_vmware.api [None req-f9d43b2f-bac7-4cc5-a92a-b4b0b139124e tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Waiting for the task: (returnval){ [ 1450.460247] env[62510]: value = "task-1768346" [ 1450.460247] env[62510]: _type = "Task" [ 1450.460247] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.471729] env[62510]: DEBUG oslo_vmware.api [None req-f9d43b2f-bac7-4cc5-a92a-b4b0b139124e tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768346, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.480344] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768345, 'name': Rename_Task, 'duration_secs': 0.149809} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.480717] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1450.480867] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-035b2616-17e0-41e4-b836-667a2de2ed12 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.489715] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Waiting for the task: (returnval){ [ 1450.489715] env[62510]: value = "task-1768347" [ 1450.489715] env[62510]: _type = "Task" [ 1450.489715] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.497419] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768347, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.611733] env[62510]: DEBUG nova.network.neutron [req-32fdb1cf-0b1e-4e59-b8ba-1f3037551abb req-171008d3-d3fa-485f-b39b-7e6da46e3c18 service nova] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Updated VIF entry in instance network info cache for port af2efe8c-492c-4033-b300-295761787dee. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1450.612071] env[62510]: DEBUG nova.network.neutron [req-32fdb1cf-0b1e-4e59-b8ba-1f3037551abb req-171008d3-d3fa-485f-b39b-7e6da46e3c18 service nova] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Updating instance_info_cache with network_info: [{"id": "af2efe8c-492c-4033-b300-295761787dee", "address": "fa:16:3e:34:7f:d9", "network": {"id": "4e82b2ba-c971-4518-a64c-536e98c8a34d", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1899115936-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.241", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03842d36a3404265b14f6b70db40c1fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "16e15a36-a55b-4c27-b864-f284339009d0", "external-id": "nsx-vlan-transportzone-616", "segmentation_id": 616, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf2efe8c-49", "ovs_interfaceid": "af2efe8c-492c-4033-b300-295761787dee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1450.613992] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Releasing lock "refresh_cache-9a1a0428-8ccd-4614-8853-ef3eeec23d55" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1450.613992] env[62510]: DEBUG nova.compute.manager [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Instance network_info: |[{"id": "7477b8b7-c766-4c58-a1dc-9db9f24198b6", "address": "fa:16:3e:02:33:4d", "network": {"id": "434ea666-edd6-4865-9204-8bb2cb6b5b91", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-946168541-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "96bb2a761e354152ba4690456124f6b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7477b8b7-c7", "ovs_interfaceid": "7477b8b7-c766-4c58-a1dc-9db9f24198b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1450.613992] env[62510]: DEBUG oslo_concurrency.lockutils [req-e447076f-36a6-44cc-8a32-55932d06b25a req-f60b1475-c261-4aeb-9baf-2fc243184ea0 service nova] Acquired lock "refresh_cache-9a1a0428-8ccd-4614-8853-ef3eeec23d55" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1450.614315] env[62510]: DEBUG nova.network.neutron [req-e447076f-36a6-44cc-8a32-55932d06b25a req-f60b1475-c261-4aeb-9baf-2fc243184ea0 service nova] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Refreshing network info cache for port 7477b8b7-c766-4c58-a1dc-9db9f24198b6 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1450.615581] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:02:33:4d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '54495d8d-2696-4f65-b925-e567abdc205f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7477b8b7-c766-4c58-a1dc-9db9f24198b6', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1450.623428] env[62510]: DEBUG oslo.service.loopingcall [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1450.626905] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1450.628123] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-95cbdd46-72bb-4043-9d44-ecbde7b42549 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.655997] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1450.655997] env[62510]: value = "task-1768348" [ 1450.655997] env[62510]: _type = "Task" [ 1450.655997] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.667380] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768348, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.855324] env[62510]: DEBUG nova.compute.manager [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1450.893574] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46fa1f09-96b8-48a0-a087-6cf5cc849153 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.905059] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08604191-3394-4528-a262-0599919bae42 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.945914] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c3affd9-f5e3-4909-be7f-157cc19beaa1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.951155] env[62510]: DEBUG oslo_concurrency.lockutils [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1450.955813] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d297b39f-4342-41f3-960c-d15ade86f442 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.968411] env[62510]: DEBUG oslo_vmware.api [None req-f9d43b2f-bac7-4cc5-a92a-b4b0b139124e tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768346, 'name': PowerOnVM_Task, 'duration_secs': 0.389508} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.976490] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9d43b2f-bac7-4cc5-a92a-b4b0b139124e tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1450.976765] env[62510]: DEBUG nova.compute.manager [None req-f9d43b2f-bac7-4cc5-a92a-b4b0b139124e tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1450.977630] env[62510]: DEBUG nova.compute.provider_tree [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1450.979526] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa48b3cc-e99e-4fa3-af21-a140912532ff {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.999735] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768347, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.125577] env[62510]: DEBUG oslo_concurrency.lockutils [req-32fdb1cf-0b1e-4e59-b8ba-1f3037551abb req-171008d3-d3fa-485f-b39b-7e6da46e3c18 service nova] Releasing lock "refresh_cache-8bbafd7f-cdd1-4246-a509-2f97a6f78497" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1451.167984] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768348, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.340309] env[62510]: DEBUG nova.network.neutron [None req-77c2addd-0259-4f24-8f69-727d7125e8f5 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Updating instance_info_cache with network_info: [{"id": "7489ebb6-ec5f-4097-9a62-81a2d3dedd52", "address": "fa:16:3e:81:65:65", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.149", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7489ebb6-ec", "ovs_interfaceid": "7489ebb6-ec5f-4097-9a62-81a2d3dedd52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1451.382365] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1451.423886] env[62510]: DEBUG nova.network.neutron [req-e447076f-36a6-44cc-8a32-55932d06b25a req-f60b1475-c261-4aeb-9baf-2fc243184ea0 service nova] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Updated VIF entry in instance network info cache for port 7477b8b7-c766-4c58-a1dc-9db9f24198b6. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1451.423886] env[62510]: DEBUG nova.network.neutron [req-e447076f-36a6-44cc-8a32-55932d06b25a req-f60b1475-c261-4aeb-9baf-2fc243184ea0 service nova] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Updating instance_info_cache with network_info: [{"id": "7477b8b7-c766-4c58-a1dc-9db9f24198b6", "address": "fa:16:3e:02:33:4d", "network": {"id": "434ea666-edd6-4865-9204-8bb2cb6b5b91", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-946168541-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "96bb2a761e354152ba4690456124f6b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7477b8b7-c7", "ovs_interfaceid": "7477b8b7-c766-4c58-a1dc-9db9f24198b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1451.503494] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768347, 'name': PowerOnVM_Task, 'duration_secs': 0.755051} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.503814] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1451.503990] env[62510]: INFO nova.compute.manager [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Took 9.16 seconds to spawn the instance on the hypervisor. 
[ 1451.504211] env[62510]: DEBUG nova.compute.manager [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1451.504977] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdc4767a-c976-408a-a76d-e66ef5766f0f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.520729] env[62510]: DEBUG nova.scheduler.client.report [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Updated inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with generation 49 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1451.520967] env[62510]: DEBUG nova.compute.provider_tree [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Updating resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 generation from 49 to 50 during operation: update_inventory {{(pid=62510) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1451.521152] env[62510]: DEBUG nova.compute.provider_tree [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1451.584290] env[62510]: DEBUG nova.network.neutron [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Successfully updated port: 1e000a52-1d14-40cd-a33b-51dd1c0196b5 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1451.667551] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768348, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.843804] env[62510]: DEBUG oslo_concurrency.lockutils [None req-77c2addd-0259-4f24-8f69-727d7125e8f5 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Releasing lock "refresh_cache-12768001-6ed0-47be-8f20-c59ee82b842a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1451.845935] env[62510]: DEBUG nova.objects.instance [None req-77c2addd-0259-4f24-8f69-727d7125e8f5 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Lazy-loading 'migration_context' on Instance uuid 12768001-6ed0-47be-8f20-c59ee82b842a {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1451.927461] env[62510]: DEBUG oslo_concurrency.lockutils [req-e447076f-36a6-44cc-8a32-55932d06b25a req-f60b1475-c261-4aeb-9baf-2fc243184ea0 service nova] Releasing lock "refresh_cache-9a1a0428-8ccd-4614-8853-ef3eeec23d55" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1452.023432] env[62510]: INFO nova.compute.manager [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Took 35.46 seconds to build instance. [ 1452.029955] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.311s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1452.030910] env[62510]: DEBUG nova.compute.manager [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1452.034034] env[62510]: DEBUG oslo_concurrency.lockutils [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.483s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1452.036981] env[62510]: INFO nova.compute.claims [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1452.087993] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Acquiring lock "refresh_cache-cfe53f9c-d78b-4af7-b991-f3549c03f22d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1452.087993] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Acquired lock "refresh_cache-cfe53f9c-d78b-4af7-b991-f3549c03f22d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1452.087993] env[62510]: DEBUG nova.network.neutron [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1452.170533] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768348, 'name': CreateVM_Task, 'duration_secs': 1.335416} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.171131] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1452.172672] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1452.172979] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1452.173479] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1452.174723] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ebd1395-aa87-49f9-bc9c-c337c4cc2a99 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.180102] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Waiting for the task: (returnval){ [ 1452.180102] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52b4ebc8-eac2-33a0-9175-c6bf143dec4a" [ 1452.180102] env[62510]: _type = "Task" [ 1452.180102] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.187902] env[62510]: DEBUG nova.compute.manager [req-df5f7308-9fa5-44c9-872c-ba3fe456347c req-50704395-6c0c-47e8-bad9-a0395806fd24 service nova] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Received event network-vif-plugged-1e000a52-1d14-40cd-a33b-51dd1c0196b5 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1452.187902] env[62510]: DEBUG oslo_concurrency.lockutils [req-df5f7308-9fa5-44c9-872c-ba3fe456347c req-50704395-6c0c-47e8-bad9-a0395806fd24 service nova] Acquiring lock "cfe53f9c-d78b-4af7-b991-f3549c03f22d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1452.187902] env[62510]: DEBUG oslo_concurrency.lockutils [req-df5f7308-9fa5-44c9-872c-ba3fe456347c req-50704395-6c0c-47e8-bad9-a0395806fd24 service nova] Lock "cfe53f9c-d78b-4af7-b991-f3549c03f22d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1452.187902] env[62510]: DEBUG oslo_concurrency.lockutils [req-df5f7308-9fa5-44c9-872c-ba3fe456347c req-50704395-6c0c-47e8-bad9-a0395806fd24 service nova] Lock "cfe53f9c-d78b-4af7-b991-f3549c03f22d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1452.187902] env[62510]: DEBUG nova.compute.manager [req-df5f7308-9fa5-44c9-872c-ba3fe456347c req-50704395-6c0c-47e8-bad9-a0395806fd24 service nova] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] No waiting events found dispatching network-vif-plugged-1e000a52-1d14-40cd-a33b-51dd1c0196b5 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1452.188592] env[62510]: WARNING nova.compute.manager [req-df5f7308-9fa5-44c9-872c-ba3fe456347c req-50704395-6c0c-47e8-bad9-a0395806fd24 service nova] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Received unexpected event network-vif-plugged-1e000a52-1d14-40cd-a33b-51dd1c0196b5 for instance with vm_state building and task_state spawning. [ 1452.188879] env[62510]: DEBUG nova.compute.manager [req-df5f7308-9fa5-44c9-872c-ba3fe456347c req-50704395-6c0c-47e8-bad9-a0395806fd24 service nova] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Received event network-changed-1e000a52-1d14-40cd-a33b-51dd1c0196b5 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1452.189311] env[62510]: DEBUG nova.compute.manager [req-df5f7308-9fa5-44c9-872c-ba3fe456347c req-50704395-6c0c-47e8-bad9-a0395806fd24 service nova] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Refreshing instance network info cache due to event network-changed-1e000a52-1d14-40cd-a33b-51dd1c0196b5. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1452.189597] env[62510]: DEBUG oslo_concurrency.lockutils [req-df5f7308-9fa5-44c9-872c-ba3fe456347c req-50704395-6c0c-47e8-bad9-a0395806fd24 service nova] Acquiring lock "refresh_cache-cfe53f9c-d78b-4af7-b991-f3549c03f22d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1452.193839] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52b4ebc8-eac2-33a0-9175-c6bf143dec4a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.209847] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1452.349954] env[62510]: DEBUG nova.objects.base [None req-77c2addd-0259-4f24-8f69-727d7125e8f5 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Object Instance<12768001-6ed0-47be-8f20-c59ee82b842a> lazy-loaded attributes: info_cache,migration_context {{(pid=62510) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1452.351048] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1c6795f-ea96-4276-81c5-e694b6ffc983 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.373372] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-820a4010-d151-4c3c-9002-7bd80abf5204 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.380748] env[62510]: DEBUG oslo_vmware.api [None req-77c2addd-0259-4f24-8f69-727d7125e8f5 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Waiting for the task: (returnval){ [ 1452.380748] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52fdff5b-fa05-9176-ae32-effcc1a37fd7" [ 1452.380748] env[62510]: _type = "Task" [ 1452.380748] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.391862] env[62510]: DEBUG oslo_vmware.api [None req-77c2addd-0259-4f24-8f69-727d7125e8f5 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52fdff5b-fa05-9176-ae32-effcc1a37fd7, 'name': SearchDatastore_Task, 'duration_secs': 0.007019} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.392184] env[62510]: DEBUG oslo_concurrency.lockutils [None req-77c2addd-0259-4f24-8f69-727d7125e8f5 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1452.525916] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Lock "26b283b0-98b4-4a15-abe0-fbf97e1f49eb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.613s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1452.543360] env[62510]: DEBUG nova.compute.utils [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1452.556793] env[62510]: DEBUG nova.compute.manager [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1452.556793] env[62510]: DEBUG nova.network.neutron [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1452.599838] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Acquiring lock "2c5d137d-4fd5-4035-a04f-bdb76e90edd7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1452.599838] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Lock "2c5d137d-4fd5-4035-a04f-bdb76e90edd7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1452.634086] env[62510]: DEBUG nova.network.neutron [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Instance cache missing network info. 
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1452.650260] env[62510]: DEBUG nova.policy [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '635c1339dcc74d98adf84fbf48042083', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e39188afd4e94f01a5b3f1ec78cf70e4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1452.690861] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52b4ebc8-eac2-33a0-9175-c6bf143dec4a, 'name': SearchDatastore_Task, 'duration_secs': 0.011195} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.693452] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1452.695776] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1452.695776] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1452.695776] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1452.695776] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1452.695776] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d2407f68-867f-4373-975c-f119a41c997c 
{{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.703284] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1452.703471] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1452.704235] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0be3ca3d-0e2e-4a7b-b392-cd07dbaaa74d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.711070] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Waiting for the task: (returnval){ [ 1452.711070] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52560088-f312-9e4f-ab26-fe118c6a0b42" [ 1452.711070] env[62510]: _type = "Task" [ 1452.711070] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.719361] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52560088-f312-9e4f-ab26-fe118c6a0b42, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.820379] env[62510]: DEBUG nova.network.neutron [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Updating instance_info_cache with network_info: [{"id": "1e000a52-1d14-40cd-a33b-51dd1c0196b5", "address": "fa:16:3e:67:fa:eb", "network": {"id": "00c0e43c-22e9-4b57-9337-f13ecd10f244", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1255004128-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98d6c12eccf74757b3cbc2c8acddeb19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27abaf31-0f39-428c-a8d3-cd7548de6818", "external-id": "nsx-vlan-transportzone-505", "segmentation_id": 505, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e000a52-1d", "ovs_interfaceid": "1e000a52-1d14-40cd-a33b-51dd1c0196b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1453.032669] env[62510]: DEBUG nova.compute.manager [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1453.060506] env[62510]: DEBUG nova.compute.manager [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1453.227972] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52560088-f312-9e4f-ab26-fe118c6a0b42, 'name': SearchDatastore_Task, 'duration_secs': 0.010738} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.228880] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86147770-f8be-407b-be44-386c0458a37e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.238929] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Waiting for the task: (returnval){ [ 1453.238929] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]520c6777-14bf-4183-afe1-6bf32bcca106" [ 1453.238929] env[62510]: _type = "Task" [ 1453.238929] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.250268] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]520c6777-14bf-4183-afe1-6bf32bcca106, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.273887] env[62510]: INFO nova.compute.manager [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Rebuilding instance [ 1453.314399] env[62510]: DEBUG nova.compute.manager [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1453.315390] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1938d6b-5ea0-4c7a-abbe-d3caf0612af7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.322735] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Releasing lock "refresh_cache-cfe53f9c-d78b-4af7-b991-f3549c03f22d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1453.323054] env[62510]: DEBUG nova.compute.manager [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Instance network_info: |[{"id": "1e000a52-1d14-40cd-a33b-51dd1c0196b5", "address": "fa:16:3e:67:fa:eb", "network": {"id": "00c0e43c-22e9-4b57-9337-f13ecd10f244", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1255004128-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"98d6c12eccf74757b3cbc2c8acddeb19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27abaf31-0f39-428c-a8d3-cd7548de6818", "external-id": "nsx-vlan-transportzone-505", "segmentation_id": 505, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e000a52-1d", "ovs_interfaceid": "1e000a52-1d14-40cd-a33b-51dd1c0196b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1453.327963] env[62510]: DEBUG oslo_concurrency.lockutils [req-df5f7308-9fa5-44c9-872c-ba3fe456347c req-50704395-6c0c-47e8-bad9-a0395806fd24 service nova] Acquired lock "refresh_cache-cfe53f9c-d78b-4af7-b991-f3549c03f22d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1453.328611] env[62510]: DEBUG nova.network.neutron [req-df5f7308-9fa5-44c9-872c-ba3fe456347c req-50704395-6c0c-47e8-bad9-a0395806fd24 service nova] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Refreshing network info cache for port 1e000a52-1d14-40cd-a33b-51dd1c0196b5 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1453.330185] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:67:fa:eb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '27abaf31-0f39-428c-a8d3-cd7548de6818', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1e000a52-1d14-40cd-a33b-51dd1c0196b5', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1453.337859] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Creating folder: Project (98d6c12eccf74757b3cbc2c8acddeb19). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1453.338551] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-44da1feb-2171-4ab4-be37-38d201ae1478 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.349175] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Created folder: Project (98d6c12eccf74757b3cbc2c8acddeb19) in parent group-v367197. [ 1453.349378] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Creating folder: Instances. Parent ref: group-v367249. 
{{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1453.351963] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b3627210-6731-41c0-81fc-4821dc498acf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.363665] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Created folder: Instances in parent group-v367249. [ 1453.363960] env[62510]: DEBUG oslo.service.loopingcall [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1453.364182] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1453.364398] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-17c9012f-8577-4c04-9612-f8d366fe75a6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.395030] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1453.395030] env[62510]: value = "task-1768351" [ 1453.395030] env[62510]: _type = "Task" [ 1453.395030] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.408110] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768351, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.559638] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1453.711322] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6006b4f-be22-4faf-ba4e-f09887962866 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.720843] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1207f19c-0af4-4211-a26d-8d6d94632b46 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.756910] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55523fdc-a02d-4b38-a632-662bc11b102d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.765281] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]520c6777-14bf-4183-afe1-6bf32bcca106, 'name': SearchDatastore_Task, 'duration_secs': 0.028022} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.768082] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1453.768082] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 9a1a0428-8ccd-4614-8853-ef3eeec23d55/9a1a0428-8ccd-4614-8853-ef3eeec23d55.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1453.768554] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-77209303-979d-4f43-9384-6e2d2821d105 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.771601] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c3963f0-5a82-4963-a5c6-559b549a332a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.786933] env[62510]: DEBUG nova.compute.provider_tree [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 
tempest-ServersAdminTestJSON-1135699744-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1453.789946] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Waiting for the task: (returnval){ [ 1453.789946] env[62510]: value = "task-1768352" [ 1453.789946] env[62510]: _type = "Task" [ 1453.789946] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.798603] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768352, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.906620] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768351, 'name': CreateVM_Task, 'duration_secs': 0.510207} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.906814] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1453.907596] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1453.907830] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1453.908279] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1453.908680] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ae10d1e-8e74-424d-85f0-f917c3dfc1a6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.914570] env[62510]: DEBUG oslo_vmware.api [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Waiting for the task: (returnval){ [ 1453.914570] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52457703-a624-a180-82fe-69723ee15e63" [ 1453.914570] env[62510]: _type = "Task" [ 
1453.914570] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.927217] env[62510]: DEBUG oslo_vmware.api [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52457703-a624-a180-82fe-69723ee15e63, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.930297] env[62510]: DEBUG nova.network.neutron [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Successfully created port: 36a0ae52-841d-4fba-ab7b-a2c6ca6c6f25 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1454.067583] env[62510]: DEBUG nova.compute.manager [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1454.077886] env[62510]: DEBUG nova.network.neutron [req-df5f7308-9fa5-44c9-872c-ba3fe456347c req-50704395-6c0c-47e8-bad9-a0395806fd24 service nova] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Updated VIF entry in instance network info cache for port 1e000a52-1d14-40cd-a33b-51dd1c0196b5. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1454.078405] env[62510]: DEBUG nova.network.neutron [req-df5f7308-9fa5-44c9-872c-ba3fe456347c req-50704395-6c0c-47e8-bad9-a0395806fd24 service nova] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Updating instance_info_cache with network_info: [{"id": "1e000a52-1d14-40cd-a33b-51dd1c0196b5", "address": "fa:16:3e:67:fa:eb", "network": {"id": "00c0e43c-22e9-4b57-9337-f13ecd10f244", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1255004128-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98d6c12eccf74757b3cbc2c8acddeb19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27abaf31-0f39-428c-a8d3-cd7548de6818", "external-id": "nsx-vlan-transportzone-505", "segmentation_id": 505, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e000a52-1d", "ovs_interfaceid": "1e000a52-1d14-40cd-a33b-51dd1c0196b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1454.100028] env[62510]: DEBUG nova.virt.hardware [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1454.100310] env[62510]: DEBUG nova.virt.hardware [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1454.100480] env[62510]: DEBUG nova.virt.hardware [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1454.100711] env[62510]: DEBUG nova.virt.hardware [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1454.100838] env[62510]: DEBUG nova.virt.hardware [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1454.100967] env[62510]: DEBUG nova.virt.hardware [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1454.101215] env[62510]: DEBUG nova.virt.hardware [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1454.101375] env[62510]: DEBUG nova.virt.hardware [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1454.101663] env[62510]: DEBUG nova.virt.hardware [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1454.101803] env[62510]: DEBUG nova.virt.hardware [None 
req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1454.102473] env[62510]: DEBUG nova.virt.hardware [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1454.103745] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07dc3cfc-3601-4e05-b47a-59746eb6a9ba {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.113929] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f55cd756-5426-4090-9525-653702a1b66e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.292477] env[62510]: DEBUG nova.scheduler.client.report [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1454.313473] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768352, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.516372} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.313742] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 9a1a0428-8ccd-4614-8853-ef3eeec23d55/9a1a0428-8ccd-4614-8853-ef3eeec23d55.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1454.313973] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1454.314548] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a5685ea3-915a-4ff7-a43d-b75355feb2ba {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.324879] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Waiting for the task: (returnval){ [ 1454.324879] env[62510]: value = "task-1768353" [ 1454.324879] env[62510]: _type = "Task" [ 1454.324879] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.332895] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768353, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.343214] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1454.343550] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0bcc7ace-9387-4f05-9028-0e020b9f54cc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.350252] env[62510]: DEBUG oslo_vmware.api [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Waiting for the task: (returnval){ [ 1454.350252] env[62510]: value = "task-1768354" [ 1454.350252] env[62510]: _type = "Task" [ 1454.350252] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.359621] env[62510]: DEBUG oslo_vmware.api [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Task: {'id': task-1768354, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.431280] env[62510]: DEBUG oslo_vmware.api [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52457703-a624-a180-82fe-69723ee15e63, 'name': SearchDatastore_Task, 'duration_secs': 0.01124} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.431280] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1454.431280] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1454.431280] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1454.431646] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1454.431646] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1454.431844] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ce22bde3-448b-46f3-8143-c141acd76067 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.440944] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1454.440944] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 
tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1454.441570] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4411d9b-edd5-4533-9e8a-adbb0ae42503 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.446835] env[62510]: DEBUG oslo_vmware.api [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Waiting for the task: (returnval){ [ 1454.446835] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52879254-74a2-c2dd-d586-68a60e5baca2" [ 1454.446835] env[62510]: _type = "Task" [ 1454.446835] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.456450] env[62510]: DEBUG oslo_vmware.api [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52879254-74a2-c2dd-d586-68a60e5baca2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.581814] env[62510]: DEBUG oslo_concurrency.lockutils [req-df5f7308-9fa5-44c9-872c-ba3fe456347c req-50704395-6c0c-47e8-bad9-a0395806fd24 service nova] Releasing lock "refresh_cache-cfe53f9c-d78b-4af7-b991-f3549c03f22d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1454.807023] env[62510]: DEBUG oslo_concurrency.lockutils [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.773s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1454.807693] env[62510]: DEBUG nova.compute.manager [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1454.814230] env[62510]: DEBUG oslo_concurrency.lockutils [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 28.225s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1454.814478] env[62510]: DEBUG nova.objects.instance [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62510) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1454.836489] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768353, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070788} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.836978] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1454.841252] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d48bcd2-2ef3-44b8-b29a-8b8a962bc201 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.887650] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Reconfiguring VM instance instance-00000013 to attach disk [datastore1] 9a1a0428-8ccd-4614-8853-ef3eeec23d55/9a1a0428-8ccd-4614-8853-ef3eeec23d55.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1454.892845] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1bf312e5-64a3-4443-8dfa-cf53e7cd3db4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.928725] env[62510]: DEBUG oslo_vmware.api [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Task: {'id': task-1768354, 'name': PowerOffVM_Task, 'duration_secs': 0.167202} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.930672] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1454.931063] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1454.931550] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Waiting for the task: (returnval){ [ 1454.931550] env[62510]: value = "task-1768355" [ 1454.931550] env[62510]: _type = "Task" [ 1454.931550] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.932678] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6ec0404-1e9a-404e-87d2-f1389c03e4ec {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.948183] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1454.958131] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f122a236-6517-42b0-a61f-06b084575de7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.960694] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768355, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.970548] env[62510]: DEBUG oslo_vmware.api [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52879254-74a2-c2dd-d586-68a60e5baca2, 'name': SearchDatastore_Task, 'duration_secs': 0.011654} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.971778] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44f4f0f5-4fa4-4cb8-a03e-db2b3f9f7741 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.979274] env[62510]: DEBUG oslo_vmware.api [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Waiting for the task: (returnval){ [ 1454.979274] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52d59ad7-779a-b0f2-43c9-87086a462b63" [ 1454.979274] env[62510]: _type = "Task" [ 1454.979274] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.984400] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1454.984615] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1454.984821] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Deleting the datastore file [datastore1] 7490c825-dfd5-409c-9fd6-0e78643338fb {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1454.987784] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e7644bd7-67fe-477c-bb0c-af19e895b49c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.989553] env[62510]: DEBUG oslo_vmware.api [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52d59ad7-779a-b0f2-43c9-87086a462b63, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.993889] env[62510]: DEBUG oslo_vmware.api [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Waiting for the task: (returnval){ [ 1454.993889] env[62510]: value = "task-1768357" [ 1454.993889] env[62510]: _type = "Task" [ 1454.993889] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.002084] env[62510]: DEBUG oslo_vmware.api [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Task: {'id': task-1768357, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.314941] env[62510]: DEBUG nova.compute.utils [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1455.316543] env[62510]: DEBUG nova.compute.manager [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1455.316763] env[62510]: DEBUG nova.network.neutron [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1455.376839] env[62510]: DEBUG nova.policy [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '635c1339dcc74d98adf84fbf48042083', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e39188afd4e94f01a5b3f1ec78cf70e4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1455.451811] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768355, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.493110] env[62510]: DEBUG oslo_vmware.api [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52d59ad7-779a-b0f2-43c9-87086a462b63, 'name': SearchDatastore_Task, 'duration_secs': 0.047383} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.493261] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1455.493544] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] cfe53f9c-d78b-4af7-b991-f3549c03f22d/cfe53f9c-d78b-4af7-b991-f3549c03f22d.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1455.493842] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8d07dac1-fab4-429a-b88b-0434595d1d23 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.514400] env[62510]: DEBUG oslo_vmware.api [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Waiting for the task: (returnval){ [ 1455.514400] env[62510]: value = "task-1768358" [ 1455.514400] env[62510]: _type = "Task" [ 1455.514400] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.514567] env[62510]: DEBUG oslo_vmware.api [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Task: {'id': task-1768357, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.475522} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.514847] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1455.515094] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1455.515296] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1455.530030] env[62510]: DEBUG oslo_vmware.api [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': task-1768358, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.716150] env[62510]: DEBUG nova.network.neutron [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Successfully created port: 13e613d5-690a-4054-8ca3-e768cb4625ea {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1455.819660] env[62510]: DEBUG nova.compute.manager [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Start building block device mappings for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1455.824583] env[62510]: DEBUG oslo_concurrency.lockutils [None req-db23a048-f6c2-409d-954f-8bf2cc0c8c95 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1455.825487] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e5058cc1-cd5d-4e2e-9524-0c01d4953961 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.086s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1455.826142] env[62510]: DEBUG nova.objects.instance [None req-e5058cc1-cd5d-4e2e-9524-0c01d4953961 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Lazy-loading 'resources' on Instance uuid eb840df4-edc1-44cb-84c9-f31b7b56b6bd {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1455.951471] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768355, 'name': ReconfigVM_Task, 'duration_secs': 1.010213} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.951829] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Reconfigured VM instance instance-00000013 to attach disk [datastore1] 9a1a0428-8ccd-4614-8853-ef3eeec23d55/9a1a0428-8ccd-4614-8853-ef3eeec23d55.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1455.952615] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b3a0d052-f48d-4c17-a44c-e374e83631c3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.962540] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Waiting for the task: (returnval){ [ 1455.962540] env[62510]: value = "task-1768359" [ 1455.962540] env[62510]: _type = "Task" [ 1455.962540] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.971859] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768359, 'name': Rename_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.032574] env[62510]: DEBUG oslo_vmware.api [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': task-1768358, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.473352] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768359, 'name': Rename_Task, 'duration_secs': 0.216836} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.476404] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1456.477290] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ebabe875-fae1-4154-ac0c-bec5d2b2ce71 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.484624] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Waiting for the task: (returnval){ [ 1456.484624] env[62510]: value = "task-1768360" [ 1456.484624] env[62510]: _type = "Task" [ 1456.484624] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.497593] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768360, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.530568] env[62510]: DEBUG oslo_vmware.api [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': task-1768358, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.773566} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.530980] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] cfe53f9c-d78b-4af7-b991-f3549c03f22d/cfe53f9c-d78b-4af7-b991-f3549c03f22d.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1456.531372] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1456.532122] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-87ac0250-da76-4052-80d6-2a442b0f7087 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.546129] env[62510]: DEBUG oslo_vmware.api [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Waiting for the task: (returnval){ [ 1456.546129] env[62510]: value = "task-1768361" [ 1456.546129] env[62510]: _type = "Task" [ 1456.546129] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.553451] env[62510]: DEBUG nova.network.neutron [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Successfully updated port: 36a0ae52-841d-4fba-ab7b-a2c6ca6c6f25 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1456.562331] env[62510]: DEBUG oslo_vmware.api [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': task-1768361, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.576682] env[62510]: DEBUG nova.virt.hardware [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1456.578243] env[62510]: DEBUG nova.virt.hardware [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1456.578243] env[62510]: DEBUG nova.virt.hardware [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1456.578243] env[62510]: DEBUG nova.virt.hardware [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1456.578243] env[62510]: DEBUG nova.virt.hardware [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1456.578243] env[62510]: DEBUG nova.virt.hardware [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1456.578243] env[62510]: DEBUG nova.virt.hardware [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1456.578243] env[62510]: DEBUG nova.virt.hardware [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1456.578243] env[62510]: DEBUG nova.virt.hardware [None req-884778d4-185f-4681-bb9d-e1a0f410b79b 
tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1456.578243] env[62510]: DEBUG nova.virt.hardware [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1456.579094] env[62510]: DEBUG nova.virt.hardware [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1456.579635] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74f3bed7-6d3d-4baf-b340-103e338cdc61 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.592919] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dface956-2e0f-42f5-a118-cbd7d0d230eb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.608594] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Instance VIF info [] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1456.614849] env[62510]: DEBUG oslo.service.loopingcall [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1456.618504] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1456.619092] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f6222b7e-6ad8-4413-bf77-5ecb1da730ee {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.642299] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1456.642299] env[62510]: value = "task-1768362" [ 1456.642299] env[62510]: _type = "Task" [ 1456.642299] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.652363] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768362, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.842529] env[62510]: DEBUG nova.compute.manager [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1456.878885] env[62510]: DEBUG nova.compute.manager [req-23b9c589-a9a7-4c2a-aff1-3b5c45a13e2e req-df67d507-5551-450c-a8c9-bcfa8b7da1a3 service nova] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Received event network-vif-plugged-36a0ae52-841d-4fba-ab7b-a2c6ca6c6f25 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1456.879169] env[62510]: DEBUG oslo_concurrency.lockutils [req-23b9c589-a9a7-4c2a-aff1-3b5c45a13e2e req-df67d507-5551-450c-a8c9-bcfa8b7da1a3 service nova] Acquiring lock "e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1456.879417] env[62510]: DEBUG oslo_concurrency.lockutils [req-23b9c589-a9a7-4c2a-aff1-3b5c45a13e2e req-df67d507-5551-450c-a8c9-bcfa8b7da1a3 service nova] Lock "e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1456.879634] env[62510]: DEBUG oslo_concurrency.lockutils [req-23b9c589-a9a7-4c2a-aff1-3b5c45a13e2e req-df67d507-5551-450c-a8c9-bcfa8b7da1a3 service nova] Lock "e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1456.879849] env[62510]: DEBUG nova.compute.manager [req-23b9c589-a9a7-4c2a-aff1-3b5c45a13e2e req-df67d507-5551-450c-a8c9-bcfa8b7da1a3 service nova] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] No waiting events found dispatching network-vif-plugged-36a0ae52-841d-4fba-ab7b-a2c6ca6c6f25 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1456.880473] env[62510]: WARNING nova.compute.manager [req-23b9c589-a9a7-4c2a-aff1-3b5c45a13e2e req-df67d507-5551-450c-a8c9-bcfa8b7da1a3 service nova] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Received unexpected event network-vif-plugged-36a0ae52-841d-4fba-ab7b-a2c6ca6c6f25 for instance with vm_state building and task_state spawning. 
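The repeated "Getting desirable topologies for flavor ... Flavor limits 0:0:0 ... Got 1 possible topologies ... Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" sequences above come from Nova's CPU-topology selection for the m1.nano flavor (1 vCPU, no stated preference, per-dimension maxima of 65536). The following is only a minimal illustrative sketch of that kind of enumeration, not the nova.virt.hardware code; the namedtuple, the function names and the scoring rule are simplifications introduced for this example.

```python
from collections import namedtuple
from itertools import product

# Simplified stand-in for the VirtCPUTopology objects printed in the log.
VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, maximum):
    """Enumerate sockets*cores*threads combinations that exactly cover the
    vCPU count without exceeding the per-dimension maxima (cf. 'Build
    topologies for 1 vcpu(s) 1:1:1' / 'Got 1 possible topologies')."""
    found = []
    for s, c, t in product(range(1, min(vcpus, maximum.sockets) + 1),
                           range(1, min(vcpus, maximum.cores) + 1),
                           range(1, min(vcpus, maximum.threads) + 1)):
        if s * c * t == vcpus:
            found.append(VirtCPUTopology(s, c, t))
    return found

def sort_by_preference(topologies, preferred):
    """Rank candidates so those matching the preferred shape come first;
    a 0 in the preference means 'no preference' for that dimension."""
    def score(topo):
        return sum(1 for want, got in zip(preferred, topo) if want and want == got)
    return sorted(topologies, key=score, reverse=True)

maximum = VirtCPUTopology(65536, 65536, 65536)   # limits reported in the log
preferred = VirtCPUTopology(0, 0, 0)             # flavor/image expressed no preference
candidates = possible_topologies(vcpus=1, maximum=maximum)
print(sort_by_preference(candidates, preferred))
# [VirtCPUTopology(sockets=1, cores=1, threads=1)]
```

With 1 vCPU and no preference, the only candidate is 1 socket x 1 core x 1 thread, which matches the "Sorted desired topologies" lines in the log.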
[ 1456.886072] env[62510]: DEBUG nova.virt.hardware [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1456.886308] env[62510]: DEBUG nova.virt.hardware [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1456.886504] env[62510]: DEBUG nova.virt.hardware [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1456.886706] env[62510]: DEBUG nova.virt.hardware [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1456.886879] env[62510]: DEBUG nova.virt.hardware [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1456.891090] env[62510]: DEBUG nova.virt.hardware [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1456.891634] env[62510]: DEBUG nova.virt.hardware [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1456.891634] env[62510]: DEBUG nova.virt.hardware [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1456.891860] env[62510]: DEBUG nova.virt.hardware [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 
tempest-ServersAdminTestJSON-1135699744-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1456.892064] env[62510]: DEBUG nova.virt.hardware [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1456.892264] env[62510]: DEBUG nova.virt.hardware [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1456.893595] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb0b78a4-d78e-47a6-ba13-66a0ef3a0b1c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.905611] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb811948-9c4c-4d34-b04c-a7d58b992641 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.998436] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768360, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.004304] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e925b81-d6fa-49b9-a5b1-e3b80124f620 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.012542] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b724ff2e-fbe8-4e99-b871-4beaad1280eb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.047163] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15bc0b1b-865a-4367-bbb0-dce2adc1d90a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.061651] env[62510]: DEBUG oslo_vmware.api [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': task-1768361, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.10186} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1457.062334] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1457.064026] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7391697e-f18e-46ed-99fb-e95d1ebd9438 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.068093] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99d50253-147f-44cd-a9f3-1c021e47bd0a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.073015] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquiring lock "refresh_cache-e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1457.073204] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquired lock "refresh_cache-e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1457.073390] env[62510]: DEBUG nova.network.neutron [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1457.096744] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Reconfiguring VM instance instance-00000014 to attach disk [datastore1] cfe53f9c-d78b-4af7-b991-f3549c03f22d/cfe53f9c-d78b-4af7-b991-f3549c03f22d.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1457.106498] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6fce30fb-079c-4cf0-b718-61fd8461ab84 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.122055] env[62510]: DEBUG nova.compute.provider_tree [None req-e5058cc1-cd5d-4e2e-9524-0c01d4953961 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1457.129421] env[62510]: DEBUG oslo_vmware.api [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 
tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Waiting for the task: (returnval){ [ 1457.129421] env[62510]: value = "task-1768363" [ 1457.129421] env[62510]: _type = "Task" [ 1457.129421] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1457.139600] env[62510]: DEBUG oslo_vmware.api [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': task-1768363, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.151863] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768362, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.207084] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1457.207323] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62510) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 1457.497576] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768360, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.625642] env[62510]: DEBUG nova.scheduler.client.report [None req-e5058cc1-cd5d-4e2e-9524-0c01d4953961 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1457.630423] env[62510]: DEBUG nova.network.neutron [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1457.657752] env[62510]: DEBUG oslo_vmware.api [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': task-1768363, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.664063] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768362, 'name': CreateVM_Task, 'duration_secs': 0.666528} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1457.664302] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1457.664832] env[62510]: DEBUG oslo_concurrency.lockutils [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1457.665152] env[62510]: DEBUG oslo_concurrency.lockutils [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1457.665285] env[62510]: DEBUG oslo_concurrency.lockutils [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1457.665548] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f9bf545-e45b-4bde-8409-20895b18605c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.670919] env[62510]: DEBUG oslo_vmware.api [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Waiting for the task: (returnval){ [ 1457.670919] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]529f8e32-7254-e231-8bf2-61987f21c31b" [ 1457.670919] env[62510]: _type = "Task" [ 1457.670919] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1457.681940] env[62510]: DEBUG oslo_vmware.api [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]529f8e32-7254-e231-8bf2-61987f21c31b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.692543] env[62510]: DEBUG nova.network.neutron [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Successfully updated port: 13e613d5-690a-4054-8ca3-e768cb4625ea {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1457.935127] env[62510]: DEBUG nova.network.neutron [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Updating instance_info_cache with network_info: [{"id": "36a0ae52-841d-4fba-ab7b-a2c6ca6c6f25", "address": "fa:16:3e:7c:5b:6c", "network": {"id": "f122ba1e-a858-4704-b83d-f76156f060fc", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1645239499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e39188afd4e94f01a5b3f1ec78cf70e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36a0ae52-84", "ovs_interfaceid": "36a0ae52-841d-4fba-ab7b-a2c6ca6c6f25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1457.997931] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768360, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.012568] env[62510]: DEBUG nova.compute.manager [req-54e6b8ce-e434-412e-ad44-a547179a8d33 req-e16ed365-d531-48e6-9f4f-5ed4d4a0add9 service nova] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Received event network-vif-plugged-13e613d5-690a-4054-8ca3-e768cb4625ea {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1458.012888] env[62510]: DEBUG oslo_concurrency.lockutils [req-54e6b8ce-e434-412e-ad44-a547179a8d33 req-e16ed365-d531-48e6-9f4f-5ed4d4a0add9 service nova] Acquiring lock "7cc6d4a6-2765-44e7-b378-e213a562593d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1458.013028] env[62510]: DEBUG oslo_concurrency.lockutils [req-54e6b8ce-e434-412e-ad44-a547179a8d33 req-e16ed365-d531-48e6-9f4f-5ed4d4a0add9 service nova] Lock "7cc6d4a6-2765-44e7-b378-e213a562593d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1458.013209] env[62510]: DEBUG oslo_concurrency.lockutils [req-54e6b8ce-e434-412e-ad44-a547179a8d33 req-e16ed365-d531-48e6-9f4f-5ed4d4a0add9 service nova] Lock "7cc6d4a6-2765-44e7-b378-e213a562593d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1458.013383] env[62510]: DEBUG nova.compute.manager [req-54e6b8ce-e434-412e-ad44-a547179a8d33 req-e16ed365-d531-48e6-9f4f-5ed4d4a0add9 service nova] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] No waiting events found dispatching network-vif-plugged-13e613d5-690a-4054-8ca3-e768cb4625ea {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1458.013563] env[62510]: WARNING nova.compute.manager [req-54e6b8ce-e434-412e-ad44-a547179a8d33 req-e16ed365-d531-48e6-9f4f-5ed4d4a0add9 service nova] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Received unexpected event network-vif-plugged-13e613d5-690a-4054-8ca3-e768cb4625ea for instance with vm_state building and task_state spawning. 
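The repeated `_poll_task` entries above show the oslo.vmware client polling long-running vCenter tasks (CreateVM_Task, ReconfigVM_Task, PowerOnVM_Task) and logging their progress until they finish. The sketch below is a minimal illustration of that poll-until-done pattern only; it does not use the real oslo.vmware API, and the `get_task_info` helper and the poll interval are assumptions for illustration.

```python
import logging
import time

LOG = logging.getLogger(__name__)


def get_task_info(task_ref):
    """Hypothetical helper: in the real driver this is a vSphere API call
    (a PropertyCollector read of the Task managed object's ``info``)."""
    raise NotImplementedError("replace with a real vSphere API call")


def wait_for_task(task_ref, poll_interval=0.5):
    """Poll a vCenter task until it reaches a terminal state.

    Mirrors the behaviour visible in the log: progress is logged on each
    poll, and success or error ends the loop.
    """
    while True:
        info = get_task_info(task_ref)  # e.g. {'state': 'running', 'progress': 87}
        state = info["state"]
        if state == "running":
            LOG.debug("Task %s progress is %s%%.", task_ref, info.get("progress", 0))
        elif state == "success":
            LOG.debug("Task %s completed successfully.", task_ref)
            return info
        elif state == "error":
            raise RuntimeError("Task %s failed: %s" % (task_ref, info.get("error")))
        time.sleep(poll_interval)
```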
[ 1458.139017] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e5058cc1-cd5d-4e2e-9524-0c01d4953961 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.311s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1458.143688] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2a23db72-f5bd-4457-b7f5-db3925b8fa29 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.759s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1458.144059] env[62510]: DEBUG nova.objects.instance [None req-2a23db72-f5bd-4457-b7f5-db3925b8fa29 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Lazy-loading 'resources' on Instance uuid 731e7110-9709-4c4e-96d2-00e21e67c6e3 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1458.161107] env[62510]: DEBUG oslo_vmware.api [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': task-1768363, 'name': ReconfigVM_Task, 'duration_secs': 0.722338} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.162654] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Reconfigured VM instance instance-00000014 to attach disk [datastore1] cfe53f9c-d78b-4af7-b991-f3549c03f22d/cfe53f9c-d78b-4af7-b991-f3549c03f22d.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1458.164202] env[62510]: INFO nova.scheduler.client.report [None req-e5058cc1-cd5d-4e2e-9524-0c01d4953961 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Deleted allocations for instance eb840df4-edc1-44cb-84c9-f31b7b56b6bd [ 1458.171244] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ffd68bac-c79f-4b3e-b9bb-4206aa60733b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.187222] env[62510]: DEBUG oslo_vmware.api [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]529f8e32-7254-e231-8bf2-61987f21c31b, 'name': SearchDatastore_Task, 'duration_secs': 0.03064} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.189031] env[62510]: DEBUG oslo_concurrency.lockutils [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1458.189031] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1458.189201] env[62510]: DEBUG oslo_concurrency.lockutils [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1458.189351] env[62510]: DEBUG oslo_concurrency.lockutils [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1458.189530] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1458.189854] env[62510]: DEBUG oslo_vmware.api [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Waiting for the task: (returnval){ [ 1458.189854] env[62510]: value = "task-1768364" [ 1458.189854] env[62510]: _type = "Task" [ 1458.189854] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.190060] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-af9dd2fa-d7fc-4afe-970b-146f7f1b6446 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.196146] env[62510]: DEBUG oslo_concurrency.lockutils [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquiring lock "refresh_cache-7cc6d4a6-2765-44e7-b378-e213a562593d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1458.196305] env[62510]: DEBUG oslo_concurrency.lockutils [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquired lock "refresh_cache-7cc6d4a6-2765-44e7-b378-e213a562593d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1458.196473] env[62510]: DEBUG nova.network.neutron [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1458.203901] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1458.204722] env[62510]: DEBUG oslo_vmware.api [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': task-1768364, 'name': Rename_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.204939] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1458.208463] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1458.208702] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1458.209630] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dde86d2c-0101-4183-a92f-aaf4b6319124 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.216712] env[62510]: DEBUG oslo_vmware.api [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Waiting for the task: (returnval){ [ 1458.216712] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52cef4bf-a0d4-8d42-0626-9e296d75d033" [ 1458.216712] env[62510]: _type = "Task" [ 1458.216712] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.225848] env[62510]: DEBUG oslo_vmware.api [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52cef4bf-a0d4-8d42-0626-9e296d75d033, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.442017] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Releasing lock "refresh_cache-e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1458.442382] env[62510]: DEBUG nova.compute.manager [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Instance network_info: |[{"id": "36a0ae52-841d-4fba-ab7b-a2c6ca6c6f25", "address": "fa:16:3e:7c:5b:6c", "network": {"id": "f122ba1e-a858-4704-b83d-f76156f060fc", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1645239499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e39188afd4e94f01a5b3f1ec78cf70e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36a0ae52-84", "ovs_interfaceid": "36a0ae52-841d-4fba-ab7b-a2c6ca6c6f25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1458.442888] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 
'fa:16:3e:7c:5b:6c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c1b8b991-feba-44e6-900c-6486e7e122f0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '36a0ae52-841d-4fba-ab7b-a2c6ca6c6f25', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1458.450438] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Creating folder: Project (e39188afd4e94f01a5b3f1ec78cf70e4). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1458.450721] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-212da549-e83c-4a02-9571-26388bdb35a0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.464199] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Created folder: Project (e39188afd4e94f01a5b3f1ec78cf70e4) in parent group-v367197. [ 1458.464418] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Creating folder: Instances. Parent ref: group-v367253. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1458.464697] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8084caab-bbd5-4e79-9f7f-4f3b37dd58c4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.475634] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Created folder: Instances in parent group-v367253. [ 1458.475884] env[62510]: DEBUG oslo.service.loopingcall [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1458.476097] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1458.476306] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-422c9b0c-2b7e-4256-bc86-b3f4450efd47 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.501739] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768360, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.503362] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1458.503362] env[62510]: value = "task-1768367" [ 1458.503362] env[62510]: _type = "Task" [ 1458.503362] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.511710] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768367, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.664494] env[62510]: DEBUG oslo_concurrency.lockutils [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Acquiring lock "9d5d29ea-be92-4881-9fc8-fea3f2f442d0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1458.664879] env[62510]: DEBUG oslo_concurrency.lockutils [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Lock "9d5d29ea-be92-4881-9fc8-fea3f2f442d0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1458.677789] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e5058cc1-cd5d-4e2e-9524-0c01d4953961 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Lock "eb840df4-edc1-44cb-84c9-f31b7b56b6bd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.961s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1458.706968] env[62510]: DEBUG oslo_vmware.api [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': task-1768364, 'name': Rename_Task, 'duration_secs': 0.197662} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.709062] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1458.709062] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-710044b9-3f3c-4a27-bca0-925d9c9d5a6e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.716990] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1458.724572] env[62510]: DEBUG oslo_vmware.api [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Waiting for the task: (returnval){ [ 1458.724572] env[62510]: value = "task-1768368" [ 1458.724572] env[62510]: _type = "Task" [ 1458.724572] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.734704] env[62510]: DEBUG oslo_vmware.api [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52cef4bf-a0d4-8d42-0626-9e296d75d033, 'name': SearchDatastore_Task, 'duration_secs': 0.014651} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.736124] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d61f85a-a6d3-4af6-ad64-2e275152fa0e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.742684] env[62510]: DEBUG oslo_vmware.api [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': task-1768368, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.746460] env[62510]: DEBUG oslo_vmware.api [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Waiting for the task: (returnval){ [ 1458.746460] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52042c9a-56e6-1a3f-a142-95abec0ad741" [ 1458.746460] env[62510]: _type = "Task" [ 1458.746460] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.760205] env[62510]: DEBUG oslo_vmware.api [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52042c9a-56e6-1a3f-a142-95abec0ad741, 'name': SearchDatastore_Task, 'duration_secs': 0.011225} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.760800] env[62510]: DEBUG oslo_concurrency.lockutils [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1458.760800] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 7490c825-dfd5-409c-9fd6-0e78643338fb/7490c825-dfd5-409c-9fd6-0e78643338fb.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1458.761023] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0b5f7ea2-0392-4b77-96cb-cdeead94a84f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.768429] env[62510]: DEBUG oslo_vmware.api [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Waiting for the task: (returnval){ [ 1458.768429] env[62510]: value = "task-1768369" [ 1458.768429] env[62510]: _type = "Task" [ 1458.768429] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.775286] env[62510]: DEBUG nova.network.neutron [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1458.785465] env[62510]: DEBUG oslo_vmware.api [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Task: {'id': task-1768369, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.015213] env[62510]: DEBUG oslo_vmware.api [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768360, 'name': PowerOnVM_Task, 'duration_secs': 2.282943} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.015586] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1459.015903] env[62510]: INFO nova.compute.manager [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Took 11.56 seconds to spawn the instance on the hypervisor. [ 1459.016446] env[62510]: DEBUG nova.compute.manager [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1459.017434] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0045cc4-a1a6-4b3a-a282-518b8cfcafe9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.024082] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768367, 'name': CreateVM_Task, 'duration_secs': 0.511724} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.026205] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1459.026205] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1459.026205] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1459.026205] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1459.026205] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c20df2f8-e62b-45c2-a768-a0805771742b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.039861] env[62510]: DEBUG oslo_vmware.api [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 
tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1459.039861] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5228e87d-5ef9-e217-629b-92ebdbbe8e7c" [ 1459.039861] env[62510]: _type = "Task" [ 1459.039861] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.045119] env[62510]: DEBUG nova.network.neutron [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Updating instance_info_cache with network_info: [{"id": "13e613d5-690a-4054-8ca3-e768cb4625ea", "address": "fa:16:3e:c1:51:ab", "network": {"id": "f122ba1e-a858-4704-b83d-f76156f060fc", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1645239499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e39188afd4e94f01a5b3f1ec78cf70e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13e613d5-69", "ovs_interfaceid": "13e613d5-690a-4054-8ca3-e768cb4625ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1459.049843] env[62510]: DEBUG oslo_vmware.api [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5228e87d-5ef9-e217-629b-92ebdbbe8e7c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.209443] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1459.216370] env[62510]: DEBUG nova.compute.manager [req-82981f77-d34e-484f-b753-7ee1b42073ef req-00e79d49-3371-4dd2-8c8c-104a77a4dba5 service nova] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Received event network-changed-36a0ae52-841d-4fba-ab7b-a2c6ca6c6f25 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1459.216370] env[62510]: DEBUG nova.compute.manager [req-82981f77-d34e-484f-b753-7ee1b42073ef req-00e79d49-3371-4dd2-8c8c-104a77a4dba5 service nova] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Refreshing instance network info cache due to event network-changed-36a0ae52-841d-4fba-ab7b-a2c6ca6c6f25. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1459.216370] env[62510]: DEBUG oslo_concurrency.lockutils [req-82981f77-d34e-484f-b753-7ee1b42073ef req-00e79d49-3371-4dd2-8c8c-104a77a4dba5 service nova] Acquiring lock "refresh_cache-e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1459.216613] env[62510]: DEBUG oslo_concurrency.lockutils [req-82981f77-d34e-484f-b753-7ee1b42073ef req-00e79d49-3371-4dd2-8c8c-104a77a4dba5 service nova] Acquired lock "refresh_cache-e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1459.216708] env[62510]: DEBUG nova.network.neutron [req-82981f77-d34e-484f-b753-7ee1b42073ef req-00e79d49-3371-4dd2-8c8c-104a77a4dba5 service nova] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Refreshing network info cache for port 36a0ae52-841d-4fba-ab7b-a2c6ca6c6f25 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1459.239033] env[62510]: DEBUG oslo_vmware.api [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': task-1768368, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.288366] env[62510]: DEBUG oslo_vmware.api [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Task: {'id': task-1768369, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.397679] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9d09175-ee47-4944-9f7c-1269a6ef36b2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.410014] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48e45b2c-ded3-4efb-add3-de5df06b0573 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.444834] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10f16907-4cee-43ea-81ad-d8a9aca0b107 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.453701] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4417001-c65c-4376-8abc-546a267e6447 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.470553] env[62510]: DEBUG nova.compute.provider_tree [None req-2a23db72-f5bd-4457-b7f5-db3925b8fa29 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1459.548161] env[62510]: INFO nova.compute.manager [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 
9a1a0428-8ccd-4614-8853-ef3eeec23d55] Took 37.99 seconds to build instance. [ 1459.551287] env[62510]: DEBUG oslo_concurrency.lockutils [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Releasing lock "refresh_cache-7cc6d4a6-2765-44e7-b378-e213a562593d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1459.552621] env[62510]: DEBUG nova.compute.manager [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Instance network_info: |[{"id": "13e613d5-690a-4054-8ca3-e768cb4625ea", "address": "fa:16:3e:c1:51:ab", "network": {"id": "f122ba1e-a858-4704-b83d-f76156f060fc", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1645239499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e39188afd4e94f01a5b3f1ec78cf70e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13e613d5-69", "ovs_interfaceid": "13e613d5-690a-4054-8ca3-e768cb4625ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1459.556604] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c1:51:ab', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c1b8b991-feba-44e6-900c-6486e7e122f0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '13e613d5-690a-4054-8ca3-e768cb4625ea', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1459.566617] env[62510]: DEBUG oslo.service.loopingcall [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1459.566617] env[62510]: DEBUG oslo_vmware.api [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5228e87d-5ef9-e217-629b-92ebdbbe8e7c, 'name': SearchDatastore_Task, 'duration_secs': 0.068261} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.566617] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1459.566617] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1459.566617] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1459.566617] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1459.566617] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1459.566823] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1459.568028] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3b85dfb9-ea28-492b-aab9-9bb209bd2779 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.584503] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dc6c406f-fc91-4707-a60e-00fafeaa0dbd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.592725] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1459.592725] env[62510]: value = "task-1768370" [ 1459.592725] env[62510]: _type = "Task" [ 1459.592725] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.594016] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1459.594236] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1459.598678] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5423df5a-69d7-4164-ad6c-efe6e43849a8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.608269] env[62510]: DEBUG oslo_vmware.api [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1459.608269] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5214e5ce-5ab2-a776-bab4-d1c271dbd415" [ 1459.608269] env[62510]: _type = "Task" [ 1459.608269] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.608564] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768370, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.624035] env[62510]: DEBUG oslo_vmware.api [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5214e5ce-5ab2-a776-bab4-d1c271dbd415, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.719787] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1459.734443] env[62510]: DEBUG oslo_vmware.api [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': task-1768368, 'name': PowerOnVM_Task, 'duration_secs': 0.90323} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.735819] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1459.735819] env[62510]: INFO nova.compute.manager [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Took 9.98 seconds to spawn the instance on the hypervisor. [ 1459.735819] env[62510]: DEBUG nova.compute.manager [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1459.736804] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b814ed8-b93f-452a-8f72-d63053cbd90c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.781883] env[62510]: DEBUG oslo_vmware.api [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Task: {'id': task-1768369, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.565426} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.782175] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 7490c825-dfd5-409c-9fd6-0e78643338fb/7490c825-dfd5-409c-9fd6-0e78643338fb.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1459.785101] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1459.785101] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b5604d4f-c56e-4ba6-b698-839558963e0d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.792715] env[62510]: DEBUG oslo_vmware.api [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Waiting for the task: (returnval){ [ 1459.792715] env[62510]: value = "task-1768371" [ 1459.792715] env[62510]: _type = "Task" [ 1459.792715] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.798944] env[62510]: DEBUG oslo_vmware.api [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Task: {'id': task-1768371, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.973934] env[62510]: DEBUG nova.scheduler.client.report [None req-2a23db72-f5bd-4457-b7f5-db3925b8fa29 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1460.051519] env[62510]: DEBUG oslo_concurrency.lockutils [None req-155a7dcb-be93-4e50-88a4-f8c27724584b tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Lock "9a1a0428-8ccd-4614-8853-ef3eeec23d55" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.107s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1460.109080] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768370, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.119439] env[62510]: DEBUG oslo_vmware.api [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5214e5ce-5ab2-a776-bab4-d1c271dbd415, 'name': SearchDatastore_Task, 'duration_secs': 0.018745} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.120618] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc24cad8-910c-41ef-a5b4-6c4eb7fb3982 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.126832] env[62510]: DEBUG oslo_vmware.api [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1460.126832] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]521a8c7e-1478-16a2-9f3b-39f30285b91f" [ 1460.126832] env[62510]: _type = "Task" [ 1460.126832] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.135395] env[62510]: DEBUG oslo_vmware.api [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]521a8c7e-1478-16a2-9f3b-39f30285b91f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.264165] env[62510]: INFO nova.compute.manager [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Took 36.46 seconds to build instance. [ 1460.300992] env[62510]: DEBUG oslo_vmware.api [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Task: {'id': task-1768371, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082202} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.301438] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1460.303086] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2c2b32d-6511-442e-8cb2-cc248036b39c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.330075] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Reconfiguring VM instance instance-0000000f to attach disk [datastore1] 7490c825-dfd5-409c-9fd6-0e78643338fb/7490c825-dfd5-409c-9fd6-0e78643338fb.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1460.330794] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c10e6885-c941-46ae-8f00-1df574eed9db {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.359636] env[62510]: DEBUG oslo_vmware.api [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Waiting for the task: (returnval){ [ 1460.359636] env[62510]: value = "task-1768372" [ 1460.359636] env[62510]: _type = "Task" [ 1460.359636] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.371110] env[62510]: DEBUG oslo_vmware.api [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Task: {'id': task-1768372, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.412033] env[62510]: DEBUG nova.network.neutron [req-82981f77-d34e-484f-b753-7ee1b42073ef req-00e79d49-3371-4dd2-8c8c-104a77a4dba5 service nova] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Updated VIF entry in instance network info cache for port 36a0ae52-841d-4fba-ab7b-a2c6ca6c6f25. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1460.412289] env[62510]: DEBUG nova.network.neutron [req-82981f77-d34e-484f-b753-7ee1b42073ef req-00e79d49-3371-4dd2-8c8c-104a77a4dba5 service nova] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Updating instance_info_cache with network_info: [{"id": "36a0ae52-841d-4fba-ab7b-a2c6ca6c6f25", "address": "fa:16:3e:7c:5b:6c", "network": {"id": "f122ba1e-a858-4704-b83d-f76156f060fc", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1645239499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e39188afd4e94f01a5b3f1ec78cf70e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36a0ae52-84", "ovs_interfaceid": "36a0ae52-841d-4fba-ab7b-a2c6ca6c6f25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1460.481089] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2a23db72-f5bd-4457-b7f5-db3925b8fa29 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.337s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1460.482622] env[62510]: DEBUG oslo_concurrency.lockutils [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.450s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1460.484793] env[62510]: INFO nova.compute.claims [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1460.520497] env[62510]: INFO nova.scheduler.client.report [None req-2a23db72-f5bd-4457-b7f5-db3925b8fa29 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Deleted allocations for instance 731e7110-9709-4c4e-96d2-00e21e67c6e3 [ 1460.557025] env[62510]: DEBUG nova.compute.manager [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1460.608022] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768370, 'name': CreateVM_Task, 'duration_secs': 0.83978} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.608022] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1460.608022] env[62510]: DEBUG oslo_concurrency.lockutils [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1460.608022] env[62510]: DEBUG oslo_concurrency.lockutils [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1460.608022] env[62510]: DEBUG oslo_concurrency.lockutils [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1460.608022] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be42e05e-91ad-446e-bd0e-db805b22c088 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.612168] env[62510]: DEBUG oslo_vmware.api [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1460.612168] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5266b0a7-34f5-8a8a-105a-2c0fd61c61f2" [ 1460.612168] env[62510]: _type = "Task" [ 1460.612168] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.620341] env[62510]: DEBUG oslo_vmware.api [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5266b0a7-34f5-8a8a-105a-2c0fd61c61f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.639820] env[62510]: DEBUG oslo_vmware.api [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]521a8c7e-1478-16a2-9f3b-39f30285b91f, 'name': SearchDatastore_Task, 'duration_secs': 0.033412} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.639820] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1460.639820] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7/e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1460.639820] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b3cc47f6-91f4-442e-b613-53a555753c7e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.650085] env[62510]: DEBUG oslo_vmware.api [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1460.650085] env[62510]: value = "task-1768373" [ 1460.650085] env[62510]: _type = "Task" [ 1460.650085] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.656807] env[62510]: DEBUG oslo_vmware.api [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768373, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.767414] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f6adfec8-9205-4a1f-af34-b5de543683f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Lock "cfe53f9c-d78b-4af7-b991-f3549c03f22d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.206s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1460.825172] env[62510]: DEBUG nova.compute.manager [req-9dbedfec-d2a2-4df2-9e66-42f4b7a2432d req-7977b553-729d-4a4d-965b-ce339171aee2 service nova] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Received event network-changed-13e613d5-690a-4054-8ca3-e768cb4625ea {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1460.825395] env[62510]: DEBUG nova.compute.manager [req-9dbedfec-d2a2-4df2-9e66-42f4b7a2432d req-7977b553-729d-4a4d-965b-ce339171aee2 service nova] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Refreshing instance network info cache due to event network-changed-13e613d5-690a-4054-8ca3-e768cb4625ea. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1460.825615] env[62510]: DEBUG oslo_concurrency.lockutils [req-9dbedfec-d2a2-4df2-9e66-42f4b7a2432d req-7977b553-729d-4a4d-965b-ce339171aee2 service nova] Acquiring lock "refresh_cache-7cc6d4a6-2765-44e7-b378-e213a562593d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1460.826096] env[62510]: DEBUG oslo_concurrency.lockutils [req-9dbedfec-d2a2-4df2-9e66-42f4b7a2432d req-7977b553-729d-4a4d-965b-ce339171aee2 service nova] Acquired lock "refresh_cache-7cc6d4a6-2765-44e7-b378-e213a562593d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1460.826096] env[62510]: DEBUG nova.network.neutron [req-9dbedfec-d2a2-4df2-9e66-42f4b7a2432d req-7977b553-729d-4a4d-965b-ce339171aee2 service nova] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Refreshing network info cache for port 13e613d5-690a-4054-8ca3-e768cb4625ea {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1460.869668] env[62510]: DEBUG oslo_vmware.api [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Task: {'id': task-1768372, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.915850] env[62510]: DEBUG oslo_concurrency.lockutils [req-82981f77-d34e-484f-b753-7ee1b42073ef req-00e79d49-3371-4dd2-8c8c-104a77a4dba5 service nova] Releasing lock "refresh_cache-e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1461.031677] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2a23db72-f5bd-4457-b7f5-db3925b8fa29 tempest-ServerDiagnosticsV248Test-1525608884 tempest-ServerDiagnosticsV248Test-1525608884-project-member] Lock "731e7110-9709-4c4e-96d2-00e21e67c6e3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.245s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1461.078169] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1461.127465] env[62510]: DEBUG oslo_vmware.api [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5266b0a7-34f5-8a8a-105a-2c0fd61c61f2, 'name': SearchDatastore_Task, 'duration_secs': 0.025772} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.127465] env[62510]: DEBUG oslo_concurrency.lockutils [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1461.127465] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1461.127465] env[62510]: DEBUG oslo_concurrency.lockutils [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1461.127693] env[62510]: DEBUG oslo_concurrency.lockutils [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1461.127722] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1461.127980] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eadd7846-f1fa-406d-bbfa-96261c4a9084 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.142291] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1461.142551] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1461.143409] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4785af41-4dc6-4a52-8075-f7e446a43602 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.154175] env[62510]: DEBUG oslo_vmware.api [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1461.154175] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52426fed-d259-4b53-c889-a9813439d430" [ 1461.154175] env[62510]: _type = "Task" [ 1461.154175] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.161200] env[62510]: DEBUG oslo_vmware.api [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768373, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.166185] env[62510]: DEBUG oslo_vmware.api [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52426fed-d259-4b53-c889-a9813439d430, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.270825] env[62510]: DEBUG nova.compute.manager [None req-e13b2b75-f901-45b9-9182-0913b2474a10 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 738d69b5-86b3-4f19-8291-9d38e7fab483] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1461.369552] env[62510]: DEBUG oslo_vmware.api [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Task: {'id': task-1768372, 'name': ReconfigVM_Task, 'duration_secs': 0.58591} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.369697] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Reconfigured VM instance instance-0000000f to attach disk [datastore1] 7490c825-dfd5-409c-9fd6-0e78643338fb/7490c825-dfd5-409c-9fd6-0e78643338fb.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1461.370817] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ee33e5d0-f8d8-48d4-b15b-4d3f2171d0ff {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.378590] env[62510]: DEBUG oslo_vmware.api [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Waiting for the task: (returnval){ [ 1461.378590] env[62510]: value = "task-1768374" [ 1461.378590] env[62510]: _type = "Task" [ 1461.378590] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.392256] env[62510]: DEBUG oslo_vmware.api [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Task: {'id': task-1768374, 'name': Rename_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.486264] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Acquiring lock "f9eb5110-28ec-474e-b80e-0bfcee51483d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1461.486509] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Lock "f9eb5110-28ec-474e-b80e-0bfcee51483d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1461.602411] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dce2361b-1a7e-4bbf-a443-577e2767546e tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Acquiring lock "c58184e7-bf4f-406b-a778-9b8f60740fe6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1461.602674] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dce2361b-1a7e-4bbf-a443-577e2767546e tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Lock "c58184e7-bf4f-406b-a778-9b8f60740fe6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1461.602842] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dce2361b-1a7e-4bbf-a443-577e2767546e tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Acquiring lock "c58184e7-bf4f-406b-a778-9b8f60740fe6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1461.603060] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dce2361b-1a7e-4bbf-a443-577e2767546e tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Lock "c58184e7-bf4f-406b-a778-9b8f60740fe6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1461.603195] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dce2361b-1a7e-4bbf-a443-577e2767546e tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Lock "c58184e7-bf4f-406b-a778-9b8f60740fe6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1461.607044] env[62510]: INFO nova.compute.manager [None req-dce2361b-1a7e-4bbf-a443-577e2767546e tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Terminating instance [ 1461.663389] env[62510]: DEBUG oslo_vmware.api [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768373, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.662872} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.663389] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7/e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1461.663389] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1461.663645] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-93c6e02c-fcf4-4df0-8a57-80d732e82c91 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.669155] env[62510]: DEBUG oslo_vmware.api [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52426fed-d259-4b53-c889-a9813439d430, 'name': SearchDatastore_Task, 'duration_secs': 0.055452} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.672624] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9495b8db-92de-4273-98f1-094e6cd156ce {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.676622] env[62510]: DEBUG oslo_vmware.api [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1461.676622] env[62510]: value = "task-1768375" [ 1461.676622] env[62510]: _type = "Task" [ 1461.676622] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.681058] env[62510]: DEBUG oslo_vmware.api [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1461.681058] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52125e68-eab5-a02b-3d7f-e233c6eec530" [ 1461.681058] env[62510]: _type = "Task" [ 1461.681058] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.688622] env[62510]: DEBUG oslo_vmware.api [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768375, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.697673] env[62510]: DEBUG oslo_vmware.api [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52125e68-eab5-a02b-3d7f-e233c6eec530, 'name': SearchDatastore_Task, 'duration_secs': 0.014061} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.697994] env[62510]: DEBUG oslo_concurrency.lockutils [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1461.698435] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 7cc6d4a6-2765-44e7-b378-e213a562593d/7cc6d4a6-2765-44e7-b378-e213a562593d.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1461.698652] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-782b75e4-1f91-4dc1-b783-51a03091c949 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.704984] env[62510]: DEBUG oslo_vmware.api [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1461.704984] env[62510]: value = "task-1768376" [ 1461.704984] env[62510]: _type = "Task" [ 1461.704984] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.714403] env[62510]: DEBUG oslo_vmware.api [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768376, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.760353] env[62510]: DEBUG nova.network.neutron [req-9dbedfec-d2a2-4df2-9e66-42f4b7a2432d req-7977b553-729d-4a4d-965b-ce339171aee2 service nova] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Updated VIF entry in instance network info cache for port 13e613d5-690a-4054-8ca3-e768cb4625ea. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1461.760353] env[62510]: DEBUG nova.network.neutron [req-9dbedfec-d2a2-4df2-9e66-42f4b7a2432d req-7977b553-729d-4a4d-965b-ce339171aee2 service nova] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Updating instance_info_cache with network_info: [{"id": "13e613d5-690a-4054-8ca3-e768cb4625ea", "address": "fa:16:3e:c1:51:ab", "network": {"id": "f122ba1e-a858-4704-b83d-f76156f060fc", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1645239499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e39188afd4e94f01a5b3f1ec78cf70e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13e613d5-69", "ovs_interfaceid": "13e613d5-690a-4054-8ca3-e768cb4625ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1461.778027] env[62510]: DEBUG nova.compute.manager [None req-e13b2b75-f901-45b9-9182-0913b2474a10 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 738d69b5-86b3-4f19-8291-9d38e7fab483] Instance disappeared before build. {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2446}} [ 1461.891161] env[62510]: DEBUG oslo_vmware.api [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Task: {'id': task-1768374, 'name': Rename_Task, 'duration_secs': 0.222959} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.891441] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1461.891679] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1e4f9599-0f23-40df-8d35-b5d906113d1d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.899166] env[62510]: DEBUG oslo_vmware.api [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Waiting for the task: (returnval){ [ 1461.899166] env[62510]: value = "task-1768377" [ 1461.899166] env[62510]: _type = "Task" [ 1461.899166] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.908784] env[62510]: DEBUG oslo_vmware.api [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Task: {'id': task-1768377, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.114449] env[62510]: DEBUG nova.compute.manager [None req-dce2361b-1a7e-4bbf-a443-577e2767546e tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1462.114449] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-dce2361b-1a7e-4bbf-a443-577e2767546e tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1462.115845] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f1153c1-3b9d-481d-9719-f7fb820f02cf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.124049] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-dce2361b-1a7e-4bbf-a443-577e2767546e tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1462.124469] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-edfa5210-03d7-435e-83ad-9fe93dbb773c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.137060] env[62510]: DEBUG oslo_vmware.api [None req-dce2361b-1a7e-4bbf-a443-577e2767546e tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Waiting for the task: (returnval){ [ 1462.137060] env[62510]: value = "task-1768378" [ 1462.137060] env[62510]: _type = "Task" [ 1462.137060] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.147975] env[62510]: DEBUG oslo_vmware.api [None req-dce2361b-1a7e-4bbf-a443-577e2767546e tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768378, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.150289] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70574eea-811c-4074-9560-11fcb3838e75 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.159854] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa22bc09-6d87-478e-a1dc-0adf17b44955 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.200932] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eb098ac-736b-4d25-bc34-703683f4c952 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.216071] env[62510]: DEBUG oslo_vmware.api [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768375, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070856} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.218216] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a59eea45-060e-4aa6-9205-b2f90f964fc9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.223124] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1462.228184] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d80222a8-01f7-492c-9845-18e6027869fd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.231306] env[62510]: DEBUG oslo_vmware.api [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768376, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.242200] env[62510]: DEBUG nova.compute.provider_tree [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1462.267240] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Reconfiguring VM instance instance-00000015 to attach disk [datastore1] e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7/e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1462.267973] env[62510]: DEBUG oslo_concurrency.lockutils [req-9dbedfec-d2a2-4df2-9e66-42f4b7a2432d req-7977b553-729d-4a4d-965b-ce339171aee2 service nova] Releasing lock "refresh_cache-7cc6d4a6-2765-44e7-b378-e213a562593d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1462.268140] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-000e6a67-bc25-4d4c-8781-bef04f7d1b42 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.293606] env[62510]: DEBUG oslo_vmware.api [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1462.293606] env[62510]: value = "task-1768379" [ 1462.293606] env[62510]: _type = "Task" [ 1462.293606] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.303891] env[62510]: DEBUG oslo_vmware.api [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768379, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.307019] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e13b2b75-f901-45b9-9182-0913b2474a10 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "738d69b5-86b3-4f19-8291-9d38e7fab483" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.360s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1462.414299] env[62510]: DEBUG oslo_vmware.api [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Task: {'id': task-1768377, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.651916] env[62510]: DEBUG oslo_vmware.api [None req-dce2361b-1a7e-4bbf-a443-577e2767546e tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768378, 'name': PowerOffVM_Task, 'duration_secs': 0.340618} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.653748] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-dce2361b-1a7e-4bbf-a443-577e2767546e tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1462.653748] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-dce2361b-1a7e-4bbf-a443-577e2767546e tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1462.653748] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3fa5bfa3-d1fc-4315-b663-a1b2d4784ed1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.717746] env[62510]: DEBUG oslo_vmware.api [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768376, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.785242} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.718063] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 7cc6d4a6-2765-44e7-b378-e213a562593d/7cc6d4a6-2765-44e7-b378-e213a562593d.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1462.718293] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1462.718589] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d5f36f3c-79df-42cc-af35-152f927434f7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.722565] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-dce2361b-1a7e-4bbf-a443-577e2767546e tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1462.722966] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-dce2361b-1a7e-4bbf-a443-577e2767546e tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1462.723531] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-dce2361b-1a7e-4bbf-a443-577e2767546e tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Deleting the datastore file [datastore1] c58184e7-bf4f-406b-a778-9b8f60740fe6 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1462.724251] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7891bb8e-8396-4a24-98de-1f4288d8ff8d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.728686] env[62510]: DEBUG oslo_vmware.api [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1462.728686] env[62510]: value = "task-1768381" [ 1462.728686] env[62510]: _type = "Task" [ 1462.728686] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.734421] env[62510]: DEBUG oslo_vmware.api [None req-dce2361b-1a7e-4bbf-a443-577e2767546e tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Waiting for the task: (returnval){ [ 1462.734421] env[62510]: value = "task-1768382" [ 1462.734421] env[62510]: _type = "Task" [ 1462.734421] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.741773] env[62510]: DEBUG oslo_vmware.api [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768381, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.747926] env[62510]: DEBUG oslo_vmware.api [None req-dce2361b-1a7e-4bbf-a443-577e2767546e tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768382, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.772023] env[62510]: DEBUG nova.scheduler.client.report [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1462.772307] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Acquiring lock "87d1d75e-41c4-42e6-bf58-deabb71400e1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1462.772524] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Lock "87d1d75e-41c4-42e6-bf58-deabb71400e1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1462.805399] env[62510]: DEBUG oslo_vmware.api [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768379, 'name': ReconfigVM_Task, 'duration_secs': 0.419817} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.809019] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Reconfigured VM instance instance-00000015 to attach disk [datastore1] e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7/e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1462.809019] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4465613b-a4c1-4f6a-b5de-8a6eb0d86f89 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.810592] env[62510]: DEBUG nova.compute.manager [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1462.816099] env[62510]: DEBUG oslo_vmware.api [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1462.816099] env[62510]: value = "task-1768383" [ 1462.816099] env[62510]: _type = "Task" [ 1462.816099] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.828603] env[62510]: DEBUG oslo_vmware.api [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768383, 'name': Rename_Task} progress is 10%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.912853] env[62510]: DEBUG oslo_vmware.api [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Task: {'id': task-1768377, 'name': PowerOnVM_Task, 'duration_secs': 0.707991} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.913776] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1462.918163] env[62510]: DEBUG nova.compute.manager [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1462.919059] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bf9e05a-b746-4265-9322-05dba02248c5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.226131] env[62510]: DEBUG nova.compute.manager [req-4ffc856a-271b-4e61-8c42-673df87c995c req-96c86e22-fd61-4ddf-a8db-3317c1ea283b service nova] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Received event network-changed-1e000a52-1d14-40cd-a33b-51dd1c0196b5 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1463.226131] env[62510]: DEBUG nova.compute.manager [req-4ffc856a-271b-4e61-8c42-673df87c995c req-96c86e22-fd61-4ddf-a8db-3317c1ea283b service nova] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Refreshing instance network info cache due to event network-changed-1e000a52-1d14-40cd-a33b-51dd1c0196b5. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1463.226131] env[62510]: DEBUG oslo_concurrency.lockutils [req-4ffc856a-271b-4e61-8c42-673df87c995c req-96c86e22-fd61-4ddf-a8db-3317c1ea283b service nova] Acquiring lock "refresh_cache-cfe53f9c-d78b-4af7-b991-f3549c03f22d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1463.226131] env[62510]: DEBUG oslo_concurrency.lockutils [req-4ffc856a-271b-4e61-8c42-673df87c995c req-96c86e22-fd61-4ddf-a8db-3317c1ea283b service nova] Acquired lock "refresh_cache-cfe53f9c-d78b-4af7-b991-f3549c03f22d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1463.227122] env[62510]: DEBUG nova.network.neutron [req-4ffc856a-271b-4e61-8c42-673df87c995c req-96c86e22-fd61-4ddf-a8db-3317c1ea283b service nova] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Refreshing network info cache for port 1e000a52-1d14-40cd-a33b-51dd1c0196b5 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1463.253546] env[62510]: DEBUG oslo_vmware.api [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768381, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07556} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.259064] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1463.262069] env[62510]: DEBUG oslo_vmware.api [None req-dce2361b-1a7e-4bbf-a443-577e2767546e tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768382, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.262069] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0440f056-7a97-4229-986e-4ecae4eec2fa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.291304] env[62510]: DEBUG oslo_concurrency.lockutils [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.807s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1463.291304] env[62510]: DEBUG nova.compute.manager [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1463.311939] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Reconfiguring VM instance instance-00000016 to attach disk [datastore1] 7cc6d4a6-2765-44e7-b378-e213a562593d/7cc6d4a6-2765-44e7-b378-e213a562593d.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1463.312937] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.083s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1463.314749] env[62510]: INFO nova.compute.claims [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1463.318124] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-89f05d1b-cfa2-40cb-9a9b-11ed6a33ce72 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.340272] env[62510]: DEBUG oslo_concurrency.lockutils [None req-07db52b9-245e-484a-81e5-f30046c17e0f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Acquiring lock "35a98028-0fc6-4e13-b50d-5dacf205dbe5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1463.340840] env[62510]: DEBUG oslo_concurrency.lockutils [None req-07db52b9-245e-484a-81e5-f30046c17e0f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Lock "35a98028-0fc6-4e13-b50d-5dacf205dbe5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.002s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1463.341772] env[62510]: DEBUG oslo_concurrency.lockutils [None req-07db52b9-245e-484a-81e5-f30046c17e0f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Acquiring lock "35a98028-0fc6-4e13-b50d-5dacf205dbe5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1463.342146] env[62510]: DEBUG oslo_concurrency.lockutils [None req-07db52b9-245e-484a-81e5-f30046c17e0f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Lock "35a98028-0fc6-4e13-b50d-5dacf205dbe5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1463.342432] env[62510]: DEBUG oslo_concurrency.lockutils [None 
req-07db52b9-245e-484a-81e5-f30046c17e0f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Lock "35a98028-0fc6-4e13-b50d-5dacf205dbe5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1463.362019] env[62510]: DEBUG oslo_vmware.api [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768383, 'name': Rename_Task, 'duration_secs': 0.14183} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.363847] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1463.364307] env[62510]: DEBUG oslo_vmware.api [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1463.364307] env[62510]: value = "task-1768384" [ 1463.364307] env[62510]: _type = "Task" [ 1463.364307] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.364796] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-93a40c25-be46-4895-adb3-70577487cd33 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.377779] env[62510]: DEBUG oslo_vmware.api [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768384, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.378984] env[62510]: DEBUG oslo_concurrency.lockutils [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1463.380903] env[62510]: DEBUG oslo_vmware.api [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1463.380903] env[62510]: value = "task-1768385" [ 1463.380903] env[62510]: _type = "Task" [ 1463.380903] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.387293] env[62510]: INFO nova.compute.manager [None req-07db52b9-245e-484a-81e5-f30046c17e0f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Terminating instance [ 1463.392698] env[62510]: DEBUG oslo_vmware.api [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768385, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.444928] env[62510]: DEBUG oslo_concurrency.lockutils [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1463.750107] env[62510]: DEBUG oslo_vmware.api [None req-dce2361b-1a7e-4bbf-a443-577e2767546e tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768382, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.617186} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.750421] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-dce2361b-1a7e-4bbf-a443-577e2767546e tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1463.750562] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-dce2361b-1a7e-4bbf-a443-577e2767546e tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1463.750734] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-dce2361b-1a7e-4bbf-a443-577e2767546e tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1463.751012] env[62510]: INFO nova.compute.manager [None req-dce2361b-1a7e-4bbf-a443-577e2767546e tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Took 1.64 seconds to destroy the instance on the hypervisor. [ 1463.751408] env[62510]: DEBUG oslo.service.loopingcall [None req-dce2361b-1a7e-4bbf-a443-577e2767546e tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1463.751661] env[62510]: DEBUG nova.compute.manager [-] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1463.751758] env[62510]: DEBUG nova.network.neutron [-] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1463.814928] env[62510]: DEBUG nova.compute.utils [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1463.816431] env[62510]: DEBUG nova.compute.manager [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1463.817055] env[62510]: DEBUG nova.network.neutron [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1463.880955] env[62510]: DEBUG oslo_vmware.api [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768384, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.892296] env[62510]: DEBUG oslo_vmware.api [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768385, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.896018] env[62510]: DEBUG nova.compute.manager [None req-07db52b9-245e-484a-81e5-f30046c17e0f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1463.896018] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-07db52b9-245e-484a-81e5-f30046c17e0f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1463.896817] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7db7a61a-a554-49aa-8307-a1852878ee08 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.904870] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-07db52b9-245e-484a-81e5-f30046c17e0f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1463.905554] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b89cce2e-e7f0-4cd1-aa20-3e9033c2a1bb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.913236] env[62510]: DEBUG oslo_vmware.api [None req-07db52b9-245e-484a-81e5-f30046c17e0f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Waiting for the task: (returnval){ [ 1463.913236] env[62510]: value = "task-1768386" [ 1463.913236] env[62510]: _type = "Task" [ 1463.913236] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.919182] env[62510]: DEBUG oslo_concurrency.lockutils [None req-568d4908-b200-4728-8122-5c64ebc394d9 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Acquiring lock "7490c825-dfd5-409c-9fd6-0e78643338fb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1463.919182] env[62510]: DEBUG oslo_concurrency.lockutils [None req-568d4908-b200-4728-8122-5c64ebc394d9 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Lock "7490c825-dfd5-409c-9fd6-0e78643338fb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1463.919746] env[62510]: DEBUG oslo_concurrency.lockutils [None req-568d4908-b200-4728-8122-5c64ebc394d9 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Acquiring lock "7490c825-dfd5-409c-9fd6-0e78643338fb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1463.920102] env[62510]: DEBUG oslo_concurrency.lockutils [None req-568d4908-b200-4728-8122-5c64ebc394d9 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Lock "7490c825-dfd5-409c-9fd6-0e78643338fb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1463.920429] env[62510]: DEBUG oslo_concurrency.lockutils [None req-568d4908-b200-4728-8122-5c64ebc394d9 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Lock "7490c825-dfd5-409c-9fd6-0e78643338fb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1463.923306] env[62510]: INFO nova.compute.manager [None req-568d4908-b200-4728-8122-5c64ebc394d9 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Terminating instance [ 1463.935210] env[62510]: DEBUG oslo_vmware.api [None req-07db52b9-245e-484a-81e5-f30046c17e0f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768386, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.950722] env[62510]: DEBUG nova.policy [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f149624c585d4349b4947c11cfd396fc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ed6c1bc3c82d48938b88d98c181f9200', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1464.154813] env[62510]: DEBUG nova.network.neutron [req-4ffc856a-271b-4e61-8c42-673df87c995c req-96c86e22-fd61-4ddf-a8db-3317c1ea283b service nova] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Updated VIF entry in instance network info cache for port 1e000a52-1d14-40cd-a33b-51dd1c0196b5. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1464.155067] env[62510]: DEBUG nova.network.neutron [req-4ffc856a-271b-4e61-8c42-673df87c995c req-96c86e22-fd61-4ddf-a8db-3317c1ea283b service nova] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Updating instance_info_cache with network_info: [{"id": "1e000a52-1d14-40cd-a33b-51dd1c0196b5", "address": "fa:16:3e:67:fa:eb", "network": {"id": "00c0e43c-22e9-4b57-9337-f13ecd10f244", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1255004128-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98d6c12eccf74757b3cbc2c8acddeb19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27abaf31-0f39-428c-a8d3-cd7548de6818", "external-id": "nsx-vlan-transportzone-505", "segmentation_id": 505, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e000a52-1d", "ovs_interfaceid": "1e000a52-1d14-40cd-a33b-51dd1c0196b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1464.320417] env[62510]: DEBUG nova.compute.manager [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1464.381443] env[62510]: DEBUG oslo_vmware.api [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768384, 'name': ReconfigVM_Task, 'duration_secs': 0.758913} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.384556] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Reconfigured VM instance instance-00000016 to attach disk [datastore1] 7cc6d4a6-2765-44e7-b378-e213a562593d/7cc6d4a6-2765-44e7-b378-e213a562593d.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1464.387268] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-347281ec-697d-432c-964e-55027ab7fd34 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.398372] env[62510]: DEBUG oslo_vmware.api [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768385, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.398843] env[62510]: DEBUG oslo_vmware.api [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1464.398843] env[62510]: value = "task-1768387" [ 1464.398843] env[62510]: _type = "Task" [ 1464.398843] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.411791] env[62510]: DEBUG oslo_vmware.api [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768387, 'name': Rename_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.421256] env[62510]: DEBUG oslo_vmware.api [None req-07db52b9-245e-484a-81e5-f30046c17e0f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768386, 'name': PowerOffVM_Task, 'duration_secs': 0.251623} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.423935] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-07db52b9-245e-484a-81e5-f30046c17e0f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1464.424129] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-07db52b9-245e-484a-81e5-f30046c17e0f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1464.424639] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d7d28e9c-da27-4492-8d63-8a667daf94c4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.437220] env[62510]: DEBUG oslo_concurrency.lockutils [None req-568d4908-b200-4728-8122-5c64ebc394d9 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Acquiring lock "refresh_cache-7490c825-dfd5-409c-9fd6-0e78643338fb" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1464.437407] env[62510]: DEBUG oslo_concurrency.lockutils [None req-568d4908-b200-4728-8122-5c64ebc394d9 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Acquired lock "refresh_cache-7490c825-dfd5-409c-9fd6-0e78643338fb" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1464.437619] env[62510]: DEBUG nova.network.neutron [None req-568d4908-b200-4728-8122-5c64ebc394d9 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1464.507082] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-07db52b9-245e-484a-81e5-f30046c17e0f 
tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1464.507351] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-07db52b9-245e-484a-81e5-f30046c17e0f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1464.507484] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-07db52b9-245e-484a-81e5-f30046c17e0f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Deleting the datastore file [datastore1] 35a98028-0fc6-4e13-b50d-5dacf205dbe5 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1464.507741] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6ed33727-9f9a-47de-a698-b4d405a36c95 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.515731] env[62510]: DEBUG oslo_vmware.api [None req-07db52b9-245e-484a-81e5-f30046c17e0f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Waiting for the task: (returnval){ [ 1464.515731] env[62510]: value = "task-1768389" [ 1464.515731] env[62510]: _type = "Task" [ 1464.515731] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.525318] env[62510]: DEBUG oslo_vmware.api [None req-07db52b9-245e-484a-81e5-f30046c17e0f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768389, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.529205] env[62510]: DEBUG nova.compute.manager [req-b43ba371-613f-4540-8db8-faecbf52f728 req-d1d05d9b-be2c-40f0-b09f-ef6c11fab8c8 service nova] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Received event network-vif-deleted-9dffe699-6570-4729-8d57-b8ea05dff25d {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1464.529205] env[62510]: INFO nova.compute.manager [req-b43ba371-613f-4540-8db8-faecbf52f728 req-d1d05d9b-be2c-40f0-b09f-ef6c11fab8c8 service nova] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Neutron deleted interface 9dffe699-6570-4729-8d57-b8ea05dff25d; detaching it from the instance and deleting it from the info cache [ 1464.529205] env[62510]: DEBUG nova.network.neutron [req-b43ba371-613f-4540-8db8-faecbf52f728 req-d1d05d9b-be2c-40f0-b09f-ef6c11fab8c8 service nova] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1464.657565] env[62510]: DEBUG oslo_concurrency.lockutils [req-4ffc856a-271b-4e61-8c42-673df87c995c req-96c86e22-fd61-4ddf-a8db-3317c1ea283b service nova] Releasing lock "refresh_cache-cfe53f9c-d78b-4af7-b991-f3549c03f22d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1464.900582] env[62510]: DEBUG oslo_vmware.api [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768385, 'name': PowerOnVM_Task, 'duration_secs': 1.067943} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.903656] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1464.903857] env[62510]: INFO nova.compute.manager [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Took 10.84 seconds to spawn the instance on the hypervisor. 
[ 1464.904052] env[62510]: DEBUG nova.compute.manager [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1464.904822] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92d2911b-2b25-48f4-822a-0b1ff6747d50 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.911850] env[62510]: DEBUG nova.network.neutron [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Successfully created port: 74d14948-d303-4a00-ad07-26f85011c249 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1464.920656] env[62510]: DEBUG oslo_vmware.api [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768387, 'name': Rename_Task, 'duration_secs': 0.224254} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.921254] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1464.921715] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b55c6e84-4482-461b-ab2f-5c7477428cb9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.934046] env[62510]: DEBUG oslo_vmware.api [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1464.934046] env[62510]: value = "task-1768390" [ 1464.934046] env[62510]: _type = "Task" [ 1464.934046] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.946992] env[62510]: DEBUG oslo_vmware.api [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768390, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.967765] env[62510]: DEBUG nova.network.neutron [None req-568d4908-b200-4728-8122-5c64ebc394d9 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Instance cache missing network info. 
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1465.004229] env[62510]: DEBUG nova.network.neutron [-] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1465.026755] env[62510]: DEBUG oslo_vmware.api [None req-07db52b9-245e-484a-81e5-f30046c17e0f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768389, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.464474} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1465.027066] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-07db52b9-245e-484a-81e5-f30046c17e0f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1465.027339] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-07db52b9-245e-484a-81e5-f30046c17e0f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1465.027571] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-07db52b9-245e-484a-81e5-f30046c17e0f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1465.027769] env[62510]: INFO nova.compute.manager [None req-07db52b9-245e-484a-81e5-f30046c17e0f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1465.028083] env[62510]: DEBUG oslo.service.loopingcall [None req-07db52b9-245e-484a-81e5-f30046c17e0f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1465.028305] env[62510]: DEBUG nova.compute.manager [-] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1465.028412] env[62510]: DEBUG nova.network.neutron [-] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1465.032414] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6c2a87f-d066-4676-a6e2-7e9951b47aed {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.037883] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c4eb91e5-26f4-4374-827e-a0157d735622 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.046357] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9f33880-2fbb-49fa-8424-4605796b6ed1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.052784] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aa14591-1672-4bc0-baa4-79c0dfa60184 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.105665] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-437561cd-ce00-4fc4-9df9-39c89ea4ebef {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.109020] env[62510]: DEBUG nova.compute.manager [req-b43ba371-613f-4540-8db8-faecbf52f728 req-d1d05d9b-be2c-40f0-b09f-ef6c11fab8c8 service nova] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Detach interface failed, port_id=9dffe699-6570-4729-8d57-b8ea05dff25d, reason: Instance c58184e7-bf4f-406b-a778-9b8f60740fe6 could not be found. 
{{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1465.112037] env[62510]: DEBUG nova.network.neutron [None req-568d4908-b200-4728-8122-5c64ebc394d9 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1465.116829] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8041f85b-bc5a-4fec-9b8f-b28f2315c455 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.131273] env[62510]: DEBUG nova.compute.provider_tree [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1465.334254] env[62510]: DEBUG nova.compute.manager [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1465.390833] env[62510]: DEBUG nova.virt.hardware [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1465.391076] env[62510]: DEBUG nova.virt.hardware [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1465.391239] env[62510]: DEBUG nova.virt.hardware [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1465.391421] env[62510]: DEBUG nova.virt.hardware [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 
1465.391557] env[62510]: DEBUG nova.virt.hardware [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1465.391697] env[62510]: DEBUG nova.virt.hardware [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1465.392072] env[62510]: DEBUG nova.virt.hardware [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1465.392344] env[62510]: DEBUG nova.virt.hardware [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1465.392532] env[62510]: DEBUG nova.virt.hardware [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1465.392696] env[62510]: DEBUG nova.virt.hardware [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1465.392861] env[62510]: DEBUG nova.virt.hardware [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1465.393737] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dafb2ff-819d-43d2-b580-38580bc7f904 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.402845] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fdf2f93-3aec-4fb0-895f-6146093027dc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.433060] env[62510]: INFO nova.compute.manager [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Took 39.29 seconds to build instance. 
[ 1465.446302] env[62510]: DEBUG oslo_vmware.api [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768390, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.508164] env[62510]: INFO nova.compute.manager [-] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Took 1.76 seconds to deallocate network for instance. [ 1465.613886] env[62510]: DEBUG oslo_concurrency.lockutils [None req-568d4908-b200-4728-8122-5c64ebc394d9 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Releasing lock "refresh_cache-7490c825-dfd5-409c-9fd6-0e78643338fb" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1465.614342] env[62510]: DEBUG nova.compute.manager [None req-568d4908-b200-4728-8122-5c64ebc394d9 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1465.614567] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-568d4908-b200-4728-8122-5c64ebc394d9 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1465.616068] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e68770fa-5096-41f9-a1e2-dc72dd1ecead {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.624701] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-568d4908-b200-4728-8122-5c64ebc394d9 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1465.624801] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f4c7c540-412a-4063-aff0-9f0ff850f624 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.631014] env[62510]: DEBUG oslo_vmware.api [None req-568d4908-b200-4728-8122-5c64ebc394d9 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Waiting for the task: (returnval){ [ 1465.631014] env[62510]: value = "task-1768391" [ 1465.631014] env[62510]: _type = "Task" [ 1465.631014] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1465.635359] env[62510]: DEBUG nova.scheduler.client.report [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1465.647929] env[62510]: DEBUG oslo_vmware.api [None req-568d4908-b200-4728-8122-5c64ebc394d9 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': task-1768391, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.936091] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f2e67d8e-419d-474a-8a51-bfa4e74727a1 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.251s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1465.947857] env[62510]: DEBUG oslo_vmware.api [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768390, 'name': PowerOnVM_Task, 'duration_secs': 0.616834} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1465.948523] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1465.948761] env[62510]: INFO nova.compute.manager [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Took 9.11 seconds to spawn the instance on the hypervisor. 
[ 1465.948946] env[62510]: DEBUG nova.compute.manager [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1465.949775] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-560d23e0-073f-41d5-9043-f3485d1a678b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.015494] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dce2361b-1a7e-4bbf-a443-577e2767546e tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1466.016631] env[62510]: DEBUG nova.compute.manager [req-cb38df8d-9e12-4537-a02b-eef007fc2355 req-9437251d-5794-44a4-ad58-edfc776e7539 service nova] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Received event network-vif-deleted-210d5dee-24d1-4f38-b4b0-d1b78b6180ed {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1466.016820] env[62510]: INFO nova.compute.manager [req-cb38df8d-9e12-4537-a02b-eef007fc2355 req-9437251d-5794-44a4-ad58-edfc776e7539 service nova] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Neutron deleted interface 210d5dee-24d1-4f38-b4b0-d1b78b6180ed; detaching it from the instance and deleting it from the info cache [ 1466.016995] env[62510]: DEBUG nova.network.neutron [req-cb38df8d-9e12-4537-a02b-eef007fc2355 req-9437251d-5794-44a4-ad58-edfc776e7539 service nova] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1466.142553] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.829s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1466.142671] env[62510]: DEBUG nova.compute.manager [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1466.145205] env[62510]: DEBUG oslo_vmware.api [None req-568d4908-b200-4728-8122-5c64ebc394d9 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': task-1768391, 'name': PowerOffVM_Task, 'duration_secs': 0.255708} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.145676] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f46c7689-32a8-4938-b334-4e3028a6e8d8 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.883s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1466.145916] env[62510]: DEBUG nova.objects.instance [None req-f46c7689-32a8-4938-b334-4e3028a6e8d8 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lazy-loading 'resources' on Instance uuid 5588650b-c450-489a-a456-3b580a5b9114 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1466.149700] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-568d4908-b200-4728-8122-5c64ebc394d9 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1466.149700] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-568d4908-b200-4728-8122-5c64ebc394d9 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1466.149700] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b79f4dda-953c-4450-9cc0-1d4aa66ced74 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.179023] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-568d4908-b200-4728-8122-5c64ebc394d9 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1466.179023] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-568d4908-b200-4728-8122-5c64ebc394d9 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1466.179023] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-568d4908-b200-4728-8122-5c64ebc394d9 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Deleting the datastore file [datastore1] 7490c825-dfd5-409c-9fd6-0e78643338fb {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1466.179023] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c42bc238-1c04-4e77-afd7-bc0689092238 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.185726] env[62510]: DEBUG oslo_vmware.api [None req-568d4908-b200-4728-8122-5c64ebc394d9 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Waiting for the task: (returnval){ [ 1466.185726] env[62510]: value = "task-1768393" [ 1466.185726] env[62510]: _type = "Task" [ 
1466.185726] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.197652] env[62510]: DEBUG oslo_vmware.api [None req-568d4908-b200-4728-8122-5c64ebc394d9 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': task-1768393, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.258563] env[62510]: DEBUG nova.network.neutron [-] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1466.441968] env[62510]: DEBUG nova.compute.manager [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1466.477183] env[62510]: INFO nova.compute.manager [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Took 39.95 seconds to build instance. [ 1466.523919] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8ebc1add-5d92-4f52-96dc-510e3f2c2a49 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.532108] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fd879a1-d4d7-4559-97c4-27b7a601caff {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.568791] env[62510]: DEBUG nova.compute.manager [req-cb38df8d-9e12-4537-a02b-eef007fc2355 req-9437251d-5794-44a4-ad58-edfc776e7539 service nova] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Detach interface failed, port_id=210d5dee-24d1-4f38-b4b0-d1b78b6180ed, reason: Instance 35a98028-0fc6-4e13-b50d-5dacf205dbe5 could not be found. 
{{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1466.615705] env[62510]: DEBUG nova.compute.manager [req-c0fc901e-16c1-47c3-a841-3a8be26aecfc req-830af6d2-b5de-4850-b7ca-97e8bb79056d service nova] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Received event network-vif-plugged-74d14948-d303-4a00-ad07-26f85011c249 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1466.616860] env[62510]: DEBUG oslo_concurrency.lockutils [req-c0fc901e-16c1-47c3-a841-3a8be26aecfc req-830af6d2-b5de-4850-b7ca-97e8bb79056d service nova] Acquiring lock "d42295c9-2b0e-471e-9a87-1d7367de9588-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1466.616860] env[62510]: DEBUG oslo_concurrency.lockutils [req-c0fc901e-16c1-47c3-a841-3a8be26aecfc req-830af6d2-b5de-4850-b7ca-97e8bb79056d service nova] Lock "d42295c9-2b0e-471e-9a87-1d7367de9588-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1466.616860] env[62510]: DEBUG oslo_concurrency.lockutils [req-c0fc901e-16c1-47c3-a841-3a8be26aecfc req-830af6d2-b5de-4850-b7ca-97e8bb79056d service nova] Lock "d42295c9-2b0e-471e-9a87-1d7367de9588-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1466.616860] env[62510]: DEBUG nova.compute.manager [req-c0fc901e-16c1-47c3-a841-3a8be26aecfc req-830af6d2-b5de-4850-b7ca-97e8bb79056d service nova] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] No waiting events found dispatching network-vif-plugged-74d14948-d303-4a00-ad07-26f85011c249 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1466.617135] env[62510]: WARNING nova.compute.manager [req-c0fc901e-16c1-47c3-a841-3a8be26aecfc req-830af6d2-b5de-4850-b7ca-97e8bb79056d service nova] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Received unexpected event network-vif-plugged-74d14948-d303-4a00-ad07-26f85011c249 for instance with vm_state building and task_state spawning. [ 1466.641354] env[62510]: DEBUG nova.network.neutron [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Successfully updated port: 74d14948-d303-4a00-ad07-26f85011c249 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1466.649464] env[62510]: DEBUG nova.compute.utils [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1466.649464] env[62510]: DEBUG nova.compute.manager [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1466.649464] env[62510]: DEBUG nova.network.neutron [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1466.696484] env[62510]: DEBUG oslo_vmware.api [None req-568d4908-b200-4728-8122-5c64ebc394d9 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Task: {'id': task-1768393, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.17928} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.699583] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-568d4908-b200-4728-8122-5c64ebc394d9 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1466.699583] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-568d4908-b200-4728-8122-5c64ebc394d9 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1466.699583] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-568d4908-b200-4728-8122-5c64ebc394d9 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1466.699894] env[62510]: INFO nova.compute.manager [None req-568d4908-b200-4728-8122-5c64ebc394d9 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Took 1.09 seconds to destroy the instance on the hypervisor. [ 1466.700169] env[62510]: DEBUG oslo.service.loopingcall [None req-568d4908-b200-4728-8122-5c64ebc394d9 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1466.702011] env[62510]: DEBUG nova.policy [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '89e7119b63cf4355b156f63f60b6e1f2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b99476c2f77248e9a99d756b2bc12577', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1466.703774] env[62510]: DEBUG nova.compute.manager [-] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1466.703929] env[62510]: DEBUG nova.network.neutron [-] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1466.727680] env[62510]: DEBUG nova.network.neutron [-] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1466.761238] env[62510]: INFO nova.compute.manager [-] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Took 1.73 seconds to deallocate network for instance. [ 1466.970379] env[62510]: DEBUG oslo_concurrency.lockutils [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1466.985653] env[62510]: DEBUG oslo_concurrency.lockutils [None req-87e8f7ba-742e-4d05-b195-35a5187f9f86 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "7cc6d4a6-2765-44e7-b378-e213a562593d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.691s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1466.991025] env[62510]: DEBUG nova.network.neutron [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Successfully created port: 23a05e54-a02b-4cd8-8812-f13c57329785 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1467.149956] env[62510]: DEBUG oslo_concurrency.lockutils [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Acquiring lock "refresh_cache-d42295c9-2b0e-471e-9a87-1d7367de9588" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1467.150081] env[62510]: DEBUG oslo_concurrency.lockutils [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 
tempest-ServerExternalEventsTest-1380506733-project-member] Acquired lock "refresh_cache-d42295c9-2b0e-471e-9a87-1d7367de9588" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1467.150277] env[62510]: DEBUG nova.network.neutron [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1467.158586] env[62510]: DEBUG nova.compute.manager [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1467.228796] env[62510]: DEBUG nova.network.neutron [-] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1467.267847] env[62510]: DEBUG oslo_concurrency.lockutils [None req-07db52b9-245e-484a-81e5-f30046c17e0f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1467.281934] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39627db2-6842-4f4e-9094-263ac7bfd58d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.291760] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20c61c50-a359-4b75-9520-d536c22d7d52 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.326011] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f1208d5-0d8e-4f65-8732-9ddca374ad0d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.333544] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebf02c83-7061-479c-960f-b9773dcab246 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.348706] env[62510]: DEBUG nova.compute.provider_tree [None req-f46c7689-32a8-4938-b334-4e3028a6e8d8 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1467.488199] env[62510]: DEBUG nova.compute.manager [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Starting instance... 
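The refresh_cache-<uuid> lock lines and the "Building network info cache" / "Updating instance_info_cache" entries above follow a serialize-then-rebuild pattern: take a named per-instance lock, rebuild the network info, write it back into the cache, release the lock. A small illustrative sketch of that idea, assuming plain threading locks and a caller-supplied fetch_ports function in place of the actual oslo.concurrency and Neutron code:

    import threading
    from collections import defaultdict

    # One lock per "refresh_cache-<instance_uuid>" name, so only one thread
    # rebuilds a given instance's network info cache at a time.
    _refresh_locks = defaultdict(threading.Lock)
    _info_cache = {}

    def refresh_instance_nw_cache(instance_uuid, fetch_ports):
        """Rebuild and store the cached network_info list for one instance."""
        with _refresh_locks[f"refresh_cache-{instance_uuid}"]:
            network_info = fetch_ports(instance_uuid)     # e.g. list the instance's ports
            _info_cache[instance_uuid] = network_info     # "Updating instance_info_cache ..."
            return network_info

An empty result is cached as well, which matches the "Updating instance_info_cache with network_info: []" entries above for instances whose ports have already been deleted.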
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1467.712082] env[62510]: DEBUG nova.network.neutron [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1467.734023] env[62510]: INFO nova.compute.manager [-] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Took 1.03 seconds to deallocate network for instance. [ 1467.851802] env[62510]: DEBUG nova.scheduler.client.report [None req-f46c7689-32a8-4938-b334-4e3028a6e8d8 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1467.894770] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquiring lock "ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1467.894770] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1467.986131] env[62510]: DEBUG nova.network.neutron [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Updating instance_info_cache with network_info: [{"id": "74d14948-d303-4a00-ad07-26f85011c249", "address": "fa:16:3e:9f:48:35", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.143", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap74d14948-d3", 
"ovs_interfaceid": "74d14948-d303-4a00-ad07-26f85011c249", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1468.013169] env[62510]: DEBUG oslo_concurrency.lockutils [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1468.169310] env[62510]: DEBUG nova.compute.manager [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1468.199213] env[62510]: DEBUG nova.virt.hardware [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1468.199706] env[62510]: DEBUG nova.virt.hardware [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1468.199706] env[62510]: DEBUG nova.virt.hardware [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1468.199837] env[62510]: DEBUG nova.virt.hardware [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1468.199953] env[62510]: DEBUG nova.virt.hardware [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1468.200112] env[62510]: DEBUG nova.virt.hardware [None req-e063700e-b599-4243-ac05-b72a9b720f08 
tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1468.200325] env[62510]: DEBUG nova.virt.hardware [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1468.200485] env[62510]: DEBUG nova.virt.hardware [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1468.200651] env[62510]: DEBUG nova.virt.hardware [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1468.200828] env[62510]: DEBUG nova.virt.hardware [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1468.200973] env[62510]: DEBUG nova.virt.hardware [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1468.201864] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d1c0cdf-3af6-4543-a800-cccb7fb55756 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.210614] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d5111e1-1123-480e-8cf6-5d064efbf462 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.241190] env[62510]: DEBUG oslo_concurrency.lockutils [None req-568d4908-b200-4728-8122-5c64ebc394d9 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1468.358393] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f46c7689-32a8-4938-b334-4e3028a6e8d8 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.212s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1468.361567] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 
tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.229s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1468.365102] env[62510]: INFO nova.compute.claims [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1468.382033] env[62510]: INFO nova.scheduler.client.report [None req-f46c7689-32a8-4938-b334-4e3028a6e8d8 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Deleted allocations for instance 5588650b-c450-489a-a456-3b580a5b9114 [ 1468.490591] env[62510]: DEBUG oslo_concurrency.lockutils [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Releasing lock "refresh_cache-d42295c9-2b0e-471e-9a87-1d7367de9588" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1468.490591] env[62510]: DEBUG nova.compute.manager [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Instance network_info: |[{"id": "74d14948-d303-4a00-ad07-26f85011c249", "address": "fa:16:3e:9f:48:35", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.143", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap74d14948-d3", "ovs_interfaceid": "74d14948-d303-4a00-ad07-26f85011c249", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1468.491449] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9f:48:35', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '28d04eee-6dbb-491a-a999-b659c799679d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '74d14948-d303-4a00-ad07-26f85011c249', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1468.506563] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Creating folder: Project (ed6c1bc3c82d48938b88d98c181f9200). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1468.507262] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-684f37e7-f59b-420e-8f66-c5d7704a8052 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.517987] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Created folder: Project (ed6c1bc3c82d48938b88d98c181f9200) in parent group-v367197. [ 1468.518217] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Creating folder: Instances. Parent ref: group-v367257. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1468.518455] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3b3f9271-89f8-4469-b8f5-8b1f12386e8b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.528987] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Created folder: Instances in parent group-v367257. [ 1468.529041] env[62510]: DEBUG oslo.service.loopingcall [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1468.529332] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1468.529621] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8687ecfa-fe83-424b-820f-28db07bc1d78 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.547557] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1468.547557] env[62510]: value = "task-1768396" [ 1468.547557] env[62510]: _type = "Task" [ 1468.547557] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1468.556155] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768396, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.572448] env[62510]: DEBUG nova.network.neutron [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Successfully updated port: 23a05e54-a02b-4cd8-8812-f13c57329785 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1468.764333] env[62510]: DEBUG nova.compute.manager [req-3f36eb4e-707f-479f-9596-d926b9085904 req-1b0b5d07-9859-4e37-ab6c-66023c35c9f0 service nova] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Received event network-changed-74d14948-d303-4a00-ad07-26f85011c249 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1468.764555] env[62510]: DEBUG nova.compute.manager [req-3f36eb4e-707f-479f-9596-d926b9085904 req-1b0b5d07-9859-4e37-ab6c-66023c35c9f0 service nova] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Refreshing instance network info cache due to event network-changed-74d14948-d303-4a00-ad07-26f85011c249. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1468.764801] env[62510]: DEBUG oslo_concurrency.lockutils [req-3f36eb4e-707f-479f-9596-d926b9085904 req-1b0b5d07-9859-4e37-ab6c-66023c35c9f0 service nova] Acquiring lock "refresh_cache-d42295c9-2b0e-471e-9a87-1d7367de9588" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1468.764903] env[62510]: DEBUG oslo_concurrency.lockutils [req-3f36eb4e-707f-479f-9596-d926b9085904 req-1b0b5d07-9859-4e37-ab6c-66023c35c9f0 service nova] Acquired lock "refresh_cache-d42295c9-2b0e-471e-9a87-1d7367de9588" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1468.767200] env[62510]: DEBUG nova.network.neutron [req-3f36eb4e-707f-479f-9596-d926b9085904 req-1b0b5d07-9859-4e37-ab6c-66023c35c9f0 service nova] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Refreshing network info cache for port 74d14948-d303-4a00-ad07-26f85011c249 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1468.893102] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f46c7689-32a8-4938-b334-4e3028a6e8d8 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "5588650b-c450-489a-a456-3b580a5b9114" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.337s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1469.058445] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768396, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.075472] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Acquiring lock "refresh_cache-0a940fd0-73cc-403d-9afc-a989c67dfdef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1469.075707] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Acquired lock "refresh_cache-0a940fd0-73cc-403d-9afc-a989c67dfdef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1469.075881] env[62510]: DEBUG nova.network.neutron [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1469.503350] env[62510]: DEBUG nova.network.neutron [req-3f36eb4e-707f-479f-9596-d926b9085904 req-1b0b5d07-9859-4e37-ab6c-66023c35c9f0 service nova] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Updated VIF entry in instance network info cache for port 74d14948-d303-4a00-ad07-26f85011c249. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1469.504329] env[62510]: DEBUG nova.network.neutron [req-3f36eb4e-707f-479f-9596-d926b9085904 req-1b0b5d07-9859-4e37-ab6c-66023c35c9f0 service nova] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Updating instance_info_cache with network_info: [{"id": "74d14948-d303-4a00-ad07-26f85011c249", "address": "fa:16:3e:9f:48:35", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.143", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap74d14948-d3", "ovs_interfaceid": "74d14948-d303-4a00-ad07-26f85011c249", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1469.558320] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768396, 'name': CreateVM_Task, 'duration_secs': 0.88181} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1469.558493] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1469.559158] env[62510]: DEBUG oslo_concurrency.lockutils [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1469.559323] env[62510]: DEBUG oslo_concurrency.lockutils [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1469.559640] env[62510]: DEBUG oslo_concurrency.lockutils [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1469.559888] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf6df95a-4d1e-4a6b-8055-d3e910fb1d04 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.565493] env[62510]: DEBUG oslo_vmware.api [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Waiting for the task: (returnval){ [ 1469.565493] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5210c0a4-6a4a-5c7b-2be0-70e7cca97eb0" [ 1469.565493] env[62510]: _type = "Task" [ 1469.565493] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.575165] env[62510]: DEBUG oslo_vmware.api [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5210c0a4-6a4a-5c7b-2be0-70e7cca97eb0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.610964] env[62510]: DEBUG nova.network.neutron [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Instance cache missing network info. 
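The "[datastore1] devstack-image-cache_base/<image id>" lock and the HostDatastoreBrowser.SearchDatastore_Task call above are the check-then-fetch step of the image cache: before the base image is used for the new VM's disk, the driver takes a per-image lock and searches the datastore cache folder, and only transfers the image if it is missing. A local-filesystem sketch of that idea (fetch_image is a hypothetical stand-in for the actual transfer, and the real code works on datastore paths rather than os.path):

    import os
    import threading
    from collections import defaultdict

    _image_locks = defaultdict(threading.Lock)

    def ensure_cached_image(cache_dir, image_id, fetch_image):
        """Return the cached image path, fetching the image only if it is missing."""
        path = os.path.join(cache_dir, image_id, f"{image_id}.vmdk")
        with _image_locks[image_id]:                      # one fetch per image at a time
            if not os.path.exists(path):                  # SearchDatastore-style existence check
                os.makedirs(os.path.dirname(path), exist_ok=True)
                fetch_image(image_id, path)               # caller-supplied download/copy
        return path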
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1469.757924] env[62510]: DEBUG nova.network.neutron [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Updating instance_info_cache with network_info: [{"id": "23a05e54-a02b-4cd8-8812-f13c57329785", "address": "fa:16:3e:08:b6:8a", "network": {"id": "44a1c2ba-fa0e-43cd-96c4-1adaa5cc1504", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1916477814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b99476c2f77248e9a99d756b2bc12577", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23a05e54-a0", "ovs_interfaceid": "23a05e54-a02b-4cd8-8812-f13c57329785", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1469.899030] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9076841d-9ba6-43d1-95b2-3d6d15f01794 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.907153] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ed437a5-9b20-41d6-a730-c8de956a61c2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.937452] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d00d4f4-a5b1-4033-8cc3-32ab38e164f0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.944785] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f95d5b1-98f0-4c79-8edc-decf5da2c45e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.961371] env[62510]: DEBUG nova.compute.provider_tree [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1470.006643] env[62510]: DEBUG oslo_concurrency.lockutils [req-3f36eb4e-707f-479f-9596-d926b9085904 req-1b0b5d07-9859-4e37-ab6c-66023c35c9f0 service nova] Releasing lock "refresh_cache-d42295c9-2b0e-471e-9a87-1d7367de9588" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1470.006729] env[62510]: DEBUG nova.compute.manager 
[req-3f36eb4e-707f-479f-9596-d926b9085904 req-1b0b5d07-9859-4e37-ab6c-66023c35c9f0 service nova] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Received event network-vif-plugged-23a05e54-a02b-4cd8-8812-f13c57329785 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1470.006954] env[62510]: DEBUG oslo_concurrency.lockutils [req-3f36eb4e-707f-479f-9596-d926b9085904 req-1b0b5d07-9859-4e37-ab6c-66023c35c9f0 service nova] Acquiring lock "0a940fd0-73cc-403d-9afc-a989c67dfdef-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1470.007191] env[62510]: DEBUG oslo_concurrency.lockutils [req-3f36eb4e-707f-479f-9596-d926b9085904 req-1b0b5d07-9859-4e37-ab6c-66023c35c9f0 service nova] Lock "0a940fd0-73cc-403d-9afc-a989c67dfdef-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1470.007355] env[62510]: DEBUG oslo_concurrency.lockutils [req-3f36eb4e-707f-479f-9596-d926b9085904 req-1b0b5d07-9859-4e37-ab6c-66023c35c9f0 service nova] Lock "0a940fd0-73cc-403d-9afc-a989c67dfdef-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1470.007534] env[62510]: DEBUG nova.compute.manager [req-3f36eb4e-707f-479f-9596-d926b9085904 req-1b0b5d07-9859-4e37-ab6c-66023c35c9f0 service nova] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] No waiting events found dispatching network-vif-plugged-23a05e54-a02b-4cd8-8812-f13c57329785 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1470.007686] env[62510]: WARNING nova.compute.manager [req-3f36eb4e-707f-479f-9596-d926b9085904 req-1b0b5d07-9859-4e37-ab6c-66023c35c9f0 service nova] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Received unexpected event network-vif-plugged-23a05e54-a02b-4cd8-8812-f13c57329785 for instance with vm_state building and task_state spawning. [ 1470.007892] env[62510]: DEBUG nova.compute.manager [req-3f36eb4e-707f-479f-9596-d926b9085904 req-1b0b5d07-9859-4e37-ab6c-66023c35c9f0 service nova] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Received event network-changed-23a05e54-a02b-4cd8-8812-f13c57329785 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1470.007993] env[62510]: DEBUG nova.compute.manager [req-3f36eb4e-707f-479f-9596-d926b9085904 req-1b0b5d07-9859-4e37-ab6c-66023c35c9f0 service nova] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Refreshing instance network info cache due to event network-changed-23a05e54-a02b-4cd8-8812-f13c57329785. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1470.008245] env[62510]: DEBUG oslo_concurrency.lockutils [req-3f36eb4e-707f-479f-9596-d926b9085904 req-1b0b5d07-9859-4e37-ab6c-66023c35c9f0 service nova] Acquiring lock "refresh_cache-0a940fd0-73cc-403d-9afc-a989c67dfdef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1470.077879] env[62510]: DEBUG oslo_vmware.api [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5210c0a4-6a4a-5c7b-2be0-70e7cca97eb0, 'name': SearchDatastore_Task, 'duration_secs': 0.037369} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1470.078206] env[62510]: DEBUG oslo_concurrency.lockutils [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1470.078442] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1470.078664] env[62510]: DEBUG oslo_concurrency.lockutils [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1470.078803] env[62510]: DEBUG oslo_concurrency.lockutils [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1470.078975] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1470.079263] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d0f6874e-ce5e-4c53-8486-aa9aac23fc57 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.091712] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1470.091915] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1470.092760] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26407fb4-ff02-48ba-b0ac-6f7cfcc138fe {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.098712] env[62510]: DEBUG oslo_vmware.api [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Waiting for the task: (returnval){ [ 1470.098712] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52b329fc-cf22-fc91-beba-37c9e5dc0292" [ 1470.098712] env[62510]: _type = "Task" [ 1470.098712] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.109688] env[62510]: DEBUG oslo_vmware.api [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52b329fc-cf22-fc91-beba-37c9e5dc0292, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.260255] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Releasing lock "refresh_cache-0a940fd0-73cc-403d-9afc-a989c67dfdef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1470.260639] env[62510]: DEBUG nova.compute.manager [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Instance network_info: |[{"id": "23a05e54-a02b-4cd8-8812-f13c57329785", "address": "fa:16:3e:08:b6:8a", "network": {"id": "44a1c2ba-fa0e-43cd-96c4-1adaa5cc1504", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1916477814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b99476c2f77248e9a99d756b2bc12577", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23a05e54-a0", "ovs_interfaceid": "23a05e54-a02b-4cd8-8812-f13c57329785", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1470.261041] env[62510]: DEBUG oslo_concurrency.lockutils [req-3f36eb4e-707f-479f-9596-d926b9085904 req-1b0b5d07-9859-4e37-ab6c-66023c35c9f0 service nova] Acquired lock "refresh_cache-0a940fd0-73cc-403d-9afc-a989c67dfdef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1470.261348] env[62510]: DEBUG nova.network.neutron [req-3f36eb4e-707f-479f-9596-d926b9085904 req-1b0b5d07-9859-4e37-ab6c-66023c35c9f0 service nova] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Refreshing network info cache for port 23a05e54-a02b-4cd8-8812-f13c57329785 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1470.263020] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:08:b6:8a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7edb7c08-2fae-4df5-9ec6-5ccf06d7e337', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '23a05e54-a02b-4cd8-8812-f13c57329785', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1470.276119] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Creating folder: Project (b99476c2f77248e9a99d756b2bc12577). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1470.279185] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-493aa745-3f6c-4f34-b748-4a3ad2ec2f86 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.291638] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Created folder: Project (b99476c2f77248e9a99d756b2bc12577) in parent group-v367197. [ 1470.292481] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Creating folder: Instances. Parent ref: group-v367260. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1470.292481] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f594e63c-41b1-4a4e-b890-26cebb2eea79 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.305025] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Created folder: Instances in parent group-v367260. [ 1470.305025] env[62510]: DEBUG oslo.service.loopingcall [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
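The "Instance VIF info" record above is a direct projection of the neutron network_info entry logged earlier (bridge, MAC address, NSX logical-switch id, port id) into the dict handed to build_virtual_machine. A sketch of that mapping with a hypothetical helper name; the key names and values come straight from the log, this is not Nova's actual helper:

def vif_to_vmware_vif_info(vif):
    # `vif` is one entry of the network_info list logged above.
    details = vif["details"]
    return {
        "network_name": vif["network"]["bridge"],           # 'br-int'
        "mac_address": vif["address"],                       # 'fa:16:3e:08:b6:8a'
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": details["nsx-logical-switch-id"],  # NSX logical switch
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],                               # neutron port id
        "vif_model": "vmxnet3",
    }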
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1470.305025] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1470.305025] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3950c96b-3bd5-4c19-b659-a65c4099d5f9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.326570] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1470.326570] env[62510]: value = "task-1768399" [ 1470.326570] env[62510]: _type = "Task" [ 1470.326570] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.334867] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768399, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.465832] env[62510]: DEBUG nova.scheduler.client.report [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1470.520859] env[62510]: DEBUG nova.network.neutron [req-3f36eb4e-707f-479f-9596-d926b9085904 req-1b0b5d07-9859-4e37-ab6c-66023c35c9f0 service nova] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Updated VIF entry in instance network info cache for port 23a05e54-a02b-4cd8-8812-f13c57329785. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1470.521340] env[62510]: DEBUG nova.network.neutron [req-3f36eb4e-707f-479f-9596-d926b9085904 req-1b0b5d07-9859-4e37-ab6c-66023c35c9f0 service nova] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Updating instance_info_cache with network_info: [{"id": "23a05e54-a02b-4cd8-8812-f13c57329785", "address": "fa:16:3e:08:b6:8a", "network": {"id": "44a1c2ba-fa0e-43cd-96c4-1adaa5cc1504", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1916477814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b99476c2f77248e9a99d756b2bc12577", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23a05e54-a0", "ovs_interfaceid": "23a05e54-a02b-4cd8-8812-f13c57329785", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1470.613050] env[62510]: DEBUG oslo_vmware.api [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52b329fc-cf22-fc91-beba-37c9e5dc0292, 'name': SearchDatastore_Task, 'duration_secs': 0.011454} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1470.614022] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bbaa14c9-60a0-46f6-b1e4-ba95ba85caa7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.621161] env[62510]: DEBUG oslo_vmware.api [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Waiting for the task: (returnval){ [ 1470.621161] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]527857af-c505-f402-787f-7908560c81d4" [ 1470.621161] env[62510]: _type = "Task" [ 1470.621161] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.627576] env[62510]: DEBUG oslo_vmware.api [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]527857af-c505-f402-787f-7908560c81d4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.836712] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768399, 'name': CreateVM_Task, 'duration_secs': 0.332707} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1470.836888] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1470.837562] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1470.837728] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1470.838057] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1470.838310] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b52f0385-b86d-43e3-b9d6-b76e20831d55 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.843052] env[62510]: DEBUG oslo_vmware.api [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Waiting for the task: (returnval){ [ 1470.843052] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52e16d47-c2aa-f6d0-34df-ca7818ed5431" [ 1470.843052] env[62510]: _type = "Task" [ 1470.843052] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.850169] env[62510]: DEBUG oslo_vmware.api [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52e16d47-c2aa-f6d0-34df-ca7818ed5431, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.972510] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.611s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1470.973051] env[62510]: DEBUG nova.compute.manager [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Start building networks asynchronously for instance. 
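The CreateVM_Task records above follow the usual submit-then-poll cycle: wait_for_task logs the task handle, _poll_task reports "progress is 0%" at a fixed interval, and the task eventually completes. A minimal sketch of that cycle built on oslo.service's FixedIntervalLoopingCall; get_task_info is a hypothetical stand-in for the vCenter property read oslo.vmware performs, so this is an illustration, not oslo.vmware's code:

from oslo_service import loopingcall

def wait_for_task(get_task_info, poll_interval=0.5):
    def _poll():
        info = get_task_info()  # e.g. {'state': 'running', 'progress': 0}
        if info["state"] == "success":
            raise loopingcall.LoopingCallDone(info)
        if info["state"] == "error":
            raise RuntimeError(info.get("error", "task failed"))
        print(f"progress is {info.get('progress', 0)}%")

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    return timer.start(interval=poll_interval).wait()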
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1470.975826] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.813s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1470.977153] env[62510]: INFO nova.compute.claims [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1471.027579] env[62510]: DEBUG oslo_concurrency.lockutils [req-3f36eb4e-707f-479f-9596-d926b9085904 req-1b0b5d07-9859-4e37-ab6c-66023c35c9f0 service nova] Releasing lock "refresh_cache-0a940fd0-73cc-403d-9afc-a989c67dfdef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1471.133050] env[62510]: DEBUG oslo_vmware.api [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]527857af-c505-f402-787f-7908560c81d4, 'name': SearchDatastore_Task, 'duration_secs': 0.009247} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.133050] env[62510]: DEBUG oslo_concurrency.lockutils [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1471.133050] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] d42295c9-2b0e-471e-9a87-1d7367de9588/d42295c9-2b0e-471e-9a87-1d7367de9588.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1471.133050] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0037b032-0497-47c1-b953-c8a90a29f1e2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.137363] env[62510]: DEBUG oslo_vmware.api [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Waiting for the task: (returnval){ [ 1471.137363] env[62510]: value = "task-1768400" [ 1471.137363] env[62510]: _type = "Task" [ 1471.137363] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.145120] env[62510]: DEBUG oslo_vmware.api [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Task: {'id': task-1768400, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.353887] env[62510]: DEBUG oslo_vmware.api [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52e16d47-c2aa-f6d0-34df-ca7818ed5431, 'name': SearchDatastore_Task, 'duration_secs': 0.00908} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.354306] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1471.354556] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1471.354788] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1471.354928] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1471.355149] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1471.356569] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-77537dde-edee-4397-ba44-1895752858bb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.367590] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1471.367778] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1471.368707] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a98bed50-674f-4773-8f11-2feedc6084c7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.376256] env[62510]: DEBUG oslo_vmware.api [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Waiting for the task: (returnval){ [ 1471.376256] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52b1fddd-420a-9100-57e3-f9f2066cc191" [ 1471.376256] env[62510]: _type = "Task" [ 1471.376256] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.391203] env[62510]: DEBUG oslo_vmware.api [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52b1fddd-420a-9100-57e3-f9f2066cc191, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.482731] env[62510]: DEBUG nova.compute.utils [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1471.486395] env[62510]: DEBUG nova.compute.manager [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Allocating IP information in the background. 
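The "Using /dev/sd instead of None" record above comes from get_next_device_name: no device name was supplied, so the default /dev/sd prefix is used and the next free letter is picked. A much-simplified toy version of that idea; Nova's real helper also considers the instance's existing block device mappings and other device prefixes:

import string

def next_device_name(used, prefix="/dev/sd"):
    # Pick the first unused single-letter suffix after the prefix.
    used_suffixes = {n[len(prefix):] for n in used if n.startswith(prefix)}
    for letter in string.ascii_lowercase:
        if letter not in used_suffixes:
            return prefix + letter
    raise ValueError("no free device names under %s" % prefix)

# next_device_name([]) == '/dev/sda'
# next_device_name(['/dev/sda']) == '/dev/sdb'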
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1471.486594] env[62510]: DEBUG nova.network.neutron [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1471.570736] env[62510]: DEBUG nova.policy [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ef61a999fed843dab89b52878d27e52e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '00aa62de97a24e57890fd14f412e0244', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1471.649246] env[62510]: DEBUG oslo_vmware.api [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Task: {'id': task-1768400, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.504106} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.649246] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] d42295c9-2b0e-471e-9a87-1d7367de9588/d42295c9-2b0e-471e-9a87-1d7367de9588.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1471.649246] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1471.649246] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-37adc861-848e-41f3-826a-f01e2bad406c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.656137] env[62510]: DEBUG oslo_vmware.api [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Waiting for the task: (returnval){ [ 1471.656137] env[62510]: value = "task-1768401" [ 1471.656137] env[62510]: _type = "Task" [ 1471.656137] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.672297] env[62510]: DEBUG oslo_vmware.api [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Task: {'id': task-1768401, 'name': ExtendVirtualDisk_Task} progress is 0%. 
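The copy/extend records above encode two conventions worth spelling out: the cached image lives at "[datastore] devstack-image-cache_base/<image-id>/<image-id>.vmdk" and is copied to "[datastore] <instance-uuid>/<instance-uuid>.vmdk", and the "Extending root virtual disk to 1048576" target matches a 1 GB root disk expressed in KB (1 x 1024 x 1024 = 1048576). Hypothetical helpers that simply reproduce those conventions from the log:

def cached_image_path(datastore, image_id):
    # '[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk'
    return f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"

def instance_disk_path(datastore, instance_uuid):
    # '[datastore1] <instance-uuid>/<instance-uuid>.vmdk'
    return f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"

def root_disk_size_kb(root_gb):
    # root_gb=1 gives the 1048576 KB extend target seen above.
    return root_gb * 1024 * 1024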
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.886874] env[62510]: DEBUG oslo_vmware.api [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52b1fddd-420a-9100-57e3-f9f2066cc191, 'name': SearchDatastore_Task, 'duration_secs': 0.045358} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.887907] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df27419e-65b9-45fc-98ef-52c8a3676750 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.896921] env[62510]: DEBUG oslo_vmware.api [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Waiting for the task: (returnval){ [ 1471.896921] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5261756b-055d-27fa-25bd-7f24d6147f34" [ 1471.896921] env[62510]: _type = "Task" [ 1471.896921] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.913070] env[62510]: DEBUG oslo_vmware.api [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5261756b-055d-27fa-25bd-7f24d6147f34, 'name': SearchDatastore_Task, 'duration_secs': 0.011215} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.913454] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1471.913734] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 0a940fd0-73cc-403d-9afc-a989c67dfdef/0a940fd0-73cc-403d-9afc-a989c67dfdef.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1471.914024] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7f3e332e-c5cc-46ff-9545-453e5f3a0210 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.922411] env[62510]: DEBUG oslo_vmware.api [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Waiting for the task: (returnval){ [ 1471.922411] env[62510]: value = "task-1768402" [ 1471.922411] env[62510]: _type = "Task" [ 1471.922411] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.927781] env[62510]: DEBUG nova.network.neutron [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Successfully created port: 71218680-7c53-442a-ab27-cfa4db01f20c {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1471.935109] env[62510]: DEBUG oslo_vmware.api [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Task: {'id': task-1768402, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.987075] env[62510]: DEBUG nova.compute.manager [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1472.171518] env[62510]: DEBUG oslo_vmware.api [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Task: {'id': task-1768401, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068933} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1472.171836] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1472.173121] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd156619-c05f-4d2f-b2d6-1bc673034ba8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.204753] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Reconfiguring VM instance instance-00000017 to attach disk [datastore1] d42295c9-2b0e-471e-9a87-1d7367de9588/d42295c9-2b0e-471e-9a87-1d7367de9588.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1472.207571] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e2fe010b-833f-4a9e-a16c-e2b86944c52d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.230797] env[62510]: DEBUG oslo_vmware.api [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Waiting for the task: (returnval){ [ 1472.230797] env[62510]: value = "task-1768403" [ 1472.230797] env[62510]: _type = "Task" [ 1472.230797] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.240013] env[62510]: DEBUG oslo_vmware.api [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Task: {'id': task-1768403, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.435358] env[62510]: DEBUG oslo_vmware.api [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Task: {'id': task-1768402, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.499546} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1472.436070] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 0a940fd0-73cc-403d-9afc-a989c67dfdef/0a940fd0-73cc-403d-9afc-a989c67dfdef.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1472.436349] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1472.436629] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fb33cf90-1228-4757-9b10-b6f21e1a28b4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.443266] env[62510]: DEBUG oslo_vmware.api [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Waiting for the task: (returnval){ [ 1472.443266] env[62510]: value = "task-1768404" [ 1472.443266] env[62510]: _type = "Task" [ 1472.443266] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.456362] env[62510]: DEBUG oslo_vmware.api [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Task: {'id': task-1768404, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.625315] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f862f5b-9aa8-4366-9bb2-64b3caca4a1e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.635422] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3c611e2-25fd-43cd-b803-f7e53aaf4138 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.674946] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c170f409-1e36-4890-bb10-0d622f0fa113 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.683452] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a843eead-0bb6-4ba8-9dac-945db26edab9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.698415] env[62510]: DEBUG nova.compute.provider_tree [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1472.741097] env[62510]: DEBUG oslo_vmware.api [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Task: {'id': task-1768403, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.955839] env[62510]: DEBUG oslo_vmware.api [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Task: {'id': task-1768404, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066334} completed successfully. 
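The "Inventory has not changed" reports for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 carry the capacity model placement works from: per resource class, the usable capacity is (total - reserved) * allocation_ratio, and max_unit caps what a single allocation may request. A small illustration using the exact figures reported above; the helper name is hypothetical:

inventory = {
    "VCPU": {"total": 48, "reserved": 0, "max_unit": 16, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "max_unit": 65530, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "max_unit": 165, "allocation_ratio": 1.0},
}

def schedulable(inv):
    # Headroom derived from the reported inventory, per resource class.
    return {
        rc: {
            "capacity": (d["total"] - d["reserved"]) * d["allocation_ratio"],
            "per_allocation_max": d["max_unit"],
        }
        for rc, d in inv.items()
    }

# schedulable(inventory)["VCPU"] -> {'capacity': 192.0, 'per_allocation_max': 16}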
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1472.956171] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1472.957185] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1099f4bc-b645-4e92-bc34-cfb8f196f1ac {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.980630] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Reconfiguring VM instance instance-00000018 to attach disk [datastore1] 0a940fd0-73cc-403d-9afc-a989c67dfdef/0a940fd0-73cc-403d-9afc-a989c67dfdef.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1472.980974] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8393d600-e076-46be-bf42-d222498268f3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.001958] env[62510]: DEBUG nova.compute.manager [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1473.006037] env[62510]: DEBUG oslo_vmware.api [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Waiting for the task: (returnval){ [ 1473.006037] env[62510]: value = "task-1768405" [ 1473.006037] env[62510]: _type = "Task" [ 1473.006037] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1473.014034] env[62510]: DEBUG oslo_vmware.api [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Task: {'id': task-1768405, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.029775] env[62510]: DEBUG nova.virt.hardware [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1473.030034] env[62510]: DEBUG nova.virt.hardware [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1473.030195] env[62510]: DEBUG nova.virt.hardware [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1473.030382] env[62510]: DEBUG nova.virt.hardware [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1473.030585] env[62510]: DEBUG nova.virt.hardware [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1473.030690] env[62510]: DEBUG nova.virt.hardware [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1473.030895] env[62510]: DEBUG nova.virt.hardware [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1473.031062] env[62510]: DEBUG nova.virt.hardware [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1473.031229] 
env[62510]: DEBUG nova.virt.hardware [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1473.031393] env[62510]: DEBUG nova.virt.hardware [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1473.031564] env[62510]: DEBUG nova.virt.hardware [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1473.032605] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7320d864-cb4a-428a-9025-0f3f96911b3a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.040225] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e18812a-4aed-477b-8d58-daadb4bca45f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.201451] env[62510]: DEBUG nova.scheduler.client.report [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1473.241685] env[62510]: DEBUG oslo_vmware.api [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Task: {'id': task-1768403, 'name': ReconfigVM_Task, 'duration_secs': 0.699848} completed successfully. 
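The nova.virt.hardware records above walk the CPU-topology selection for the m1.nano flavor: 1 vCPU, no flavor or image limits (so the 65536 defaults apply), yielding exactly one candidate, sockets=1/cores=1/threads=1. A simplified enumeration that reproduces that result; Nova's _get_possible_cpu_topologies and _get_desirable_cpu_topologies additionally apply preferences and sorting:

import itertools

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # All (sockets, cores, threads) triples whose product is exactly `vcpus`
    # and which respect the per-dimension limits.
    found = []
    for sockets, cores, threads in itertools.product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            found.append((sockets, cores, threads))
    return found

# possible_topologies(1) -> [(1, 1, 1)], matching "Got 1 possible topologies".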
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1473.241991] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Reconfigured VM instance instance-00000017 to attach disk [datastore1] d42295c9-2b0e-471e-9a87-1d7367de9588/d42295c9-2b0e-471e-9a87-1d7367de9588.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1473.242882] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fbd5b0f6-43fb-4984-b32c-64dd2984074e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.250583] env[62510]: DEBUG oslo_vmware.api [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Waiting for the task: (returnval){ [ 1473.250583] env[62510]: value = "task-1768406" [ 1473.250583] env[62510]: _type = "Task" [ 1473.250583] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1473.259278] env[62510]: DEBUG oslo_vmware.api [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Task: {'id': task-1768406, 'name': Rename_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.436151] env[62510]: DEBUG nova.compute.manager [req-f5b28866-59e3-403f-9bc7-e59ab051e6fa req-9b08f1b4-2c05-463b-931c-4e8f219c149b service nova] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Received event network-vif-plugged-71218680-7c53-442a-ab27-cfa4db01f20c {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1473.436428] env[62510]: DEBUG oslo_concurrency.lockutils [req-f5b28866-59e3-403f-9bc7-e59ab051e6fa req-9b08f1b4-2c05-463b-931c-4e8f219c149b service nova] Acquiring lock "b7c2c768-573b-4c1c-ade7-45fb87b95d41-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1473.436744] env[62510]: DEBUG oslo_concurrency.lockutils [req-f5b28866-59e3-403f-9bc7-e59ab051e6fa req-9b08f1b4-2c05-463b-931c-4e8f219c149b service nova] Lock "b7c2c768-573b-4c1c-ade7-45fb87b95d41-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1473.436744] env[62510]: DEBUG oslo_concurrency.lockutils [req-f5b28866-59e3-403f-9bc7-e59ab051e6fa req-9b08f1b4-2c05-463b-931c-4e8f219c149b service nova] Lock "b7c2c768-573b-4c1c-ade7-45fb87b95d41-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1473.436905] env[62510]: DEBUG nova.compute.manager [req-f5b28866-59e3-403f-9bc7-e59ab051e6fa req-9b08f1b4-2c05-463b-931c-4e8f219c149b service nova] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] No waiting events found dispatching 
network-vif-plugged-71218680-7c53-442a-ab27-cfa4db01f20c {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1473.437505] env[62510]: WARNING nova.compute.manager [req-f5b28866-59e3-403f-9bc7-e59ab051e6fa req-9b08f1b4-2c05-463b-931c-4e8f219c149b service nova] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Received unexpected event network-vif-plugged-71218680-7c53-442a-ab27-cfa4db01f20c for instance with vm_state building and task_state spawning. [ 1473.491785] env[62510]: DEBUG nova.network.neutron [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Successfully updated port: 71218680-7c53-442a-ab27-cfa4db01f20c {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1473.521313] env[62510]: DEBUG oslo_vmware.api [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Task: {'id': task-1768405, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.707578] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.732s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1473.707942] env[62510]: DEBUG nova.compute.manager [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1473.710665] env[62510]: DEBUG oslo_concurrency.lockutils [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 22.760s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1473.710844] env[62510]: DEBUG nova.objects.instance [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62510) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1473.766832] env[62510]: DEBUG oslo_vmware.api [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Task: {'id': task-1768406, 'name': Rename_Task, 'duration_secs': 0.145948} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1473.767132] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1473.767406] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-824e70dd-1fd7-4951-aeed-58f11ec51794 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.774500] env[62510]: DEBUG oslo_vmware.api [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Waiting for the task: (returnval){ [ 1473.774500] env[62510]: value = "task-1768407" [ 1473.774500] env[62510]: _type = "Task" [ 1473.774500] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1473.784177] env[62510]: DEBUG oslo_vmware.api [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Task: {'id': task-1768407, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.998810] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Acquiring lock "refresh_cache-b7c2c768-573b-4c1c-ade7-45fb87b95d41" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1473.999098] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Acquired lock "refresh_cache-b7c2c768-573b-4c1c-ade7-45fb87b95d41" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1473.999202] env[62510]: DEBUG nova.network.neutron [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1474.017395] env[62510]: DEBUG oslo_vmware.api [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Task: {'id': task-1768405, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.214907] env[62510]: DEBUG nova.compute.utils [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1474.220675] env[62510]: DEBUG nova.compute.manager [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1474.220856] env[62510]: DEBUG nova.network.neutron [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1474.281775] env[62510]: DEBUG nova.policy [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '112bb5174a71476f9aaa66e917fc135a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cca414b18f8d431786c155d359f1325d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1474.289745] env[62510]: DEBUG oslo_vmware.api [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Task: {'id': task-1768407, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.518535] env[62510]: DEBUG oslo_vmware.api [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Task: {'id': task-1768405, 'name': ReconfigVM_Task, 'duration_secs': 1.505376} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.519387] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Reconfigured VM instance instance-00000018 to attach disk [datastore1] 0a940fd0-73cc-403d-9afc-a989c67dfdef/0a940fd0-73cc-403d-9afc-a989c67dfdef.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1474.520056] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2f5b3b96-e27c-4403-b87a-2cfe0c26e197 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.529297] env[62510]: DEBUG oslo_vmware.api [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Waiting for the task: (returnval){ [ 1474.529297] env[62510]: value = "task-1768408" [ 1474.529297] env[62510]: _type = "Task" [ 1474.529297] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.536336] env[62510]: DEBUG oslo_vmware.api [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Task: {'id': task-1768408, 'name': Rename_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.565166] env[62510]: DEBUG nova.network.neutron [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1474.724756] env[62510]: DEBUG nova.compute.manager [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Start building block device mappings for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1474.729727] env[62510]: DEBUG oslo_concurrency.lockutils [None req-80f0bf22-0b28-497d-a9e9-3b8a97f7f26c tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.019s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1474.737565] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.355s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1474.741932] env[62510]: INFO nova.compute.claims [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1474.762044] env[62510]: DEBUG nova.network.neutron [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Successfully created port: a003d1ad-b7fa-4edc-a654-9a89e9533cbd {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1474.788643] env[62510]: DEBUG oslo_vmware.api [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Task: {'id': task-1768407, 'name': PowerOnVM_Task, 'duration_secs': 0.747659} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.789688] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1474.789941] env[62510]: INFO nova.compute.manager [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Took 9.46 seconds to spawn the instance on the hypervisor. 
[ 1474.794222] env[62510]: DEBUG nova.compute.manager [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1474.795235] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-839d2f39-e32f-4fa8-959c-8ffd0f4acef5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.806799] env[62510]: DEBUG nova.network.neutron [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Updating instance_info_cache with network_info: [{"id": "71218680-7c53-442a-ab27-cfa4db01f20c", "address": "fa:16:3e:6d:75:e4", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.229", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71218680-7c", "ovs_interfaceid": "71218680-7c53-442a-ab27-cfa4db01f20c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1475.037413] env[62510]: DEBUG oslo_vmware.api [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Task: {'id': task-1768408, 'name': Rename_Task, 'duration_secs': 0.299856} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1475.037685] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1475.037922] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6bf4df4d-77eb-4020-801d-8970ac766620 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.047142] env[62510]: DEBUG oslo_vmware.api [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Waiting for the task: (returnval){ [ 1475.047142] env[62510]: value = "task-1768409" [ 1475.047142] env[62510]: _type = "Task" [ 1475.047142] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1475.055033] env[62510]: DEBUG oslo_vmware.api [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Task: {'id': task-1768409, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.313879] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Releasing lock "refresh_cache-b7c2c768-573b-4c1c-ade7-45fb87b95d41" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1475.315751] env[62510]: DEBUG nova.compute.manager [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Instance network_info: |[{"id": "71218680-7c53-442a-ab27-cfa4db01f20c", "address": "fa:16:3e:6d:75:e4", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.229", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71218680-7c", "ovs_interfaceid": "71218680-7c53-442a-ab27-cfa4db01f20c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1475.316542] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6d:75:e4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '28d04eee-6dbb-491a-a999-b659c799679d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '71218680-7c53-442a-ab27-cfa4db01f20c', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1475.327099] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Creating folder: Project (00aa62de97a24e57890fd14f412e0244). Parent ref: group-v367197. 
{{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1475.327857] env[62510]: INFO nova.compute.manager [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Took 46.33 seconds to build instance. [ 1475.329143] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-431bfcda-3ac5-4545-b386-cc6f315f4863 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.343292] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Created folder: Project (00aa62de97a24e57890fd14f412e0244) in parent group-v367197. [ 1475.343292] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Creating folder: Instances. Parent ref: group-v367263. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1475.343292] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-247b3d26-5111-4f2b-a747-88ad669ecd9f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.352249] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Created folder: Instances in parent group-v367263. [ 1475.352579] env[62510]: DEBUG oslo.service.loopingcall [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1475.352814] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1475.353066] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-65fc9fbb-ca16-4698-8ca1-ddd499a78dcb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.381808] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1475.381808] env[62510]: value = "task-1768412" [ 1475.381808] env[62510]: _type = "Task" [ 1475.381808] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1475.390326] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768412, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.467373] env[62510]: DEBUG nova.compute.manager [req-0e3fd147-08b0-4397-b290-7d2919bc173c req-25872c0f-b49f-415c-b1bd-09c50dda3ce1 service nova] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Received event network-changed-71218680-7c53-442a-ab27-cfa4db01f20c {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1475.467715] env[62510]: DEBUG nova.compute.manager [req-0e3fd147-08b0-4397-b290-7d2919bc173c req-25872c0f-b49f-415c-b1bd-09c50dda3ce1 service nova] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Refreshing instance network info cache due to event network-changed-71218680-7c53-442a-ab27-cfa4db01f20c. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1475.468905] env[62510]: DEBUG oslo_concurrency.lockutils [req-0e3fd147-08b0-4397-b290-7d2919bc173c req-25872c0f-b49f-415c-b1bd-09c50dda3ce1 service nova] Acquiring lock "refresh_cache-b7c2c768-573b-4c1c-ade7-45fb87b95d41" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1475.469191] env[62510]: DEBUG oslo_concurrency.lockutils [req-0e3fd147-08b0-4397-b290-7d2919bc173c req-25872c0f-b49f-415c-b1bd-09c50dda3ce1 service nova] Acquired lock "refresh_cache-b7c2c768-573b-4c1c-ade7-45fb87b95d41" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1475.469465] env[62510]: DEBUG nova.network.neutron [req-0e3fd147-08b0-4397-b290-7d2919bc173c req-25872c0f-b49f-415c-b1bd-09c50dda3ce1 service nova] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Refreshing network info cache for port 71218680-7c53-442a-ab27-cfa4db01f20c {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1475.557488] env[62510]: DEBUG oslo_vmware.api [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Task: {'id': task-1768409, 'name': PowerOnVM_Task, 'duration_secs': 0.485093} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1475.557848] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1475.558103] env[62510]: INFO nova.compute.manager [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Took 7.39 seconds to spawn the instance on the hypervisor. 
[ 1475.558328] env[62510]: DEBUG nova.compute.manager [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1475.559279] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41f40513-009f-44df-879d-1b47b43ce5de {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.738602] env[62510]: DEBUG nova.compute.manager [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1475.773592] env[62510]: DEBUG nova.virt.hardware [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1475.774039] env[62510]: DEBUG nova.virt.hardware [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1475.775887] env[62510]: DEBUG nova.virt.hardware [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1475.775887] env[62510]: DEBUG nova.virt.hardware [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1475.775887] env[62510]: DEBUG nova.virt.hardware [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1475.775887] env[62510]: DEBUG nova.virt.hardware [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Chose sockets=0, cores=0, threads=0; 
limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1475.775887] env[62510]: DEBUG nova.virt.hardware [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1475.775887] env[62510]: DEBUG nova.virt.hardware [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1475.775887] env[62510]: DEBUG nova.virt.hardware [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1475.775887] env[62510]: DEBUG nova.virt.hardware [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1475.775887] env[62510]: DEBUG nova.virt.hardware [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1475.776980] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2a5356b-1f81-48f3-a8be-03de074c7d44 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.788928] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb72813a-e195-43cf-acbb-046e4deaf2b2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.832189] env[62510]: DEBUG oslo_concurrency.lockutils [None req-965f7475-5ba6-4957-b4f6-cb0c2ff0f9d5 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Lock "d42295c9-2b0e-471e-9a87-1d7367de9588" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.188s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1475.893399] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768412, 'name': CreateVM_Task, 'duration_secs': 0.334504} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1475.893558] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1475.894301] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1475.894540] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1475.894786] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1475.895045] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc2c7dd1-7eb0-45b7-bd8f-6fa1849b4e16 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.902121] env[62510]: DEBUG oslo_vmware.api [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Waiting for the task: (returnval){ [ 1475.902121] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52e18402-519c-8eb4-9d79-13947736602b" [ 1475.902121] env[62510]: _type = "Task" [ 1475.902121] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1475.910270] env[62510]: DEBUG oslo_vmware.api [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52e18402-519c-8eb4-9d79-13947736602b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.084464] env[62510]: INFO nova.compute.manager [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Took 41.88 seconds to build instance. [ 1476.281775] env[62510]: DEBUG nova.network.neutron [req-0e3fd147-08b0-4397-b290-7d2919bc173c req-25872c0f-b49f-415c-b1bd-09c50dda3ce1 service nova] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Updated VIF entry in instance network info cache for port 71218680-7c53-442a-ab27-cfa4db01f20c. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1476.282179] env[62510]: DEBUG nova.network.neutron [req-0e3fd147-08b0-4397-b290-7d2919bc173c req-25872c0f-b49f-415c-b1bd-09c50dda3ce1 service nova] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Updating instance_info_cache with network_info: [{"id": "71218680-7c53-442a-ab27-cfa4db01f20c", "address": "fa:16:3e:6d:75:e4", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.229", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71218680-7c", "ovs_interfaceid": "71218680-7c53-442a-ab27-cfa4db01f20c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1476.320599] env[62510]: DEBUG nova.compute.manager [None req-1b97f493-e92b-41b5-8eda-51d305b27214 tempest-ServerExternalEventsTest-860687504 tempest-ServerExternalEventsTest-860687504-project] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Received event network-changed {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1476.322164] env[62510]: DEBUG nova.compute.manager [None req-1b97f493-e92b-41b5-8eda-51d305b27214 tempest-ServerExternalEventsTest-860687504 tempest-ServerExternalEventsTest-860687504-project] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Refreshing instance network info cache due to event network-changed. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1476.322164] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1b97f493-e92b-41b5-8eda-51d305b27214 tempest-ServerExternalEventsTest-860687504 tempest-ServerExternalEventsTest-860687504-project] Acquiring lock "refresh_cache-d42295c9-2b0e-471e-9a87-1d7367de9588" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1476.322164] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1b97f493-e92b-41b5-8eda-51d305b27214 tempest-ServerExternalEventsTest-860687504 tempest-ServerExternalEventsTest-860687504-project] Acquired lock "refresh_cache-d42295c9-2b0e-471e-9a87-1d7367de9588" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1476.322766] env[62510]: DEBUG nova.network.neutron [None req-1b97f493-e92b-41b5-8eda-51d305b27214 tempest-ServerExternalEventsTest-860687504 tempest-ServerExternalEventsTest-860687504-project] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1476.326564] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-094fef29-7cf6-4411-96bf-b14edc66691d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.335258] env[62510]: DEBUG nova.compute.manager [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1476.339755] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1bb0c31-503f-49a8-b2bf-309800f61b02 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.383090] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd0e0ec8-2dd7-405a-b7fe-1daa5c8b254d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.394199] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b7e0454-b6c7-4584-9917-ce4f403f621f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.424048] env[62510]: DEBUG nova.compute.provider_tree [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1476.435203] env[62510]: DEBUG oslo_vmware.api [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52e18402-519c-8eb4-9d79-13947736602b, 'name': SearchDatastore_Task, 'duration_secs': 0.01073} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1476.435203] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1476.435449] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1476.435625] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1476.435863] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1476.436150] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1476.440022] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f5887a99-28a2-417c-9cb8-011f9da35300 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.455486] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1476.455751] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1476.457771] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b600db1-d2d8-4f35-836c-edfd21934365 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.464426] env[62510]: DEBUG oslo_vmware.api [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Waiting for the task: (returnval){ [ 1476.464426] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]523341b1-959d-ef24-970d-d4cc64fe97ee" [ 1476.464426] env[62510]: _type = "Task" [ 1476.464426] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.474536] env[62510]: DEBUG oslo_vmware.api [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]523341b1-959d-ef24-970d-d4cc64fe97ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.589479] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e063700e-b599-4243-ac05-b72a9b720f08 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Lock "0a940fd0-73cc-403d-9afc-a989c67dfdef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.352s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.643161] env[62510]: DEBUG nova.network.neutron [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Successfully updated port: a003d1ad-b7fa-4edc-a654-9a89e9533cbd {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1476.784723] env[62510]: DEBUG oslo_concurrency.lockutils [req-0e3fd147-08b0-4397-b290-7d2919bc173c req-25872c0f-b49f-415c-b1bd-09c50dda3ce1 service nova] Releasing lock "refresh_cache-b7c2c768-573b-4c1c-ade7-45fb87b95d41" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1476.857617] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1476.929254] env[62510]: DEBUG nova.scheduler.client.report [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 
'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1476.980432] env[62510]: DEBUG oslo_vmware.api [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]523341b1-959d-ef24-970d-d4cc64fe97ee, 'name': SearchDatastore_Task, 'duration_secs': 0.031044} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1476.981234] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-302799e9-df92-49bc-af0d-88614d1c7c0e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.986420] env[62510]: DEBUG oslo_vmware.api [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Waiting for the task: (returnval){ [ 1476.986420] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]522d514e-270e-53bf-c860-8874e37ae02e" [ 1476.986420] env[62510]: _type = "Task" [ 1476.986420] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.994436] env[62510]: DEBUG oslo_vmware.api [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]522d514e-270e-53bf-c860-8874e37ae02e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.092360] env[62510]: DEBUG nova.compute.manager [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1477.117525] env[62510]: DEBUG oslo_concurrency.lockutils [None req-91bfa699-17df-4e55-8e2a-7936ad6bf4a9 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Acquiring lock "d42295c9-2b0e-471e-9a87-1d7367de9588" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1477.118045] env[62510]: DEBUG oslo_concurrency.lockutils [None req-91bfa699-17df-4e55-8e2a-7936ad6bf4a9 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Lock "d42295c9-2b0e-471e-9a87-1d7367de9588" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1477.118513] env[62510]: DEBUG oslo_concurrency.lockutils [None req-91bfa699-17df-4e55-8e2a-7936ad6bf4a9 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Acquiring lock "d42295c9-2b0e-471e-9a87-1d7367de9588-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1477.118796] env[62510]: DEBUG oslo_concurrency.lockutils [None req-91bfa699-17df-4e55-8e2a-7936ad6bf4a9 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Lock "d42295c9-2b0e-471e-9a87-1d7367de9588-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1477.118989] env[62510]: DEBUG oslo_concurrency.lockutils [None req-91bfa699-17df-4e55-8e2a-7936ad6bf4a9 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Lock "d42295c9-2b0e-471e-9a87-1d7367de9588-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1477.122654] env[62510]: INFO nova.compute.manager [None req-91bfa699-17df-4e55-8e2a-7936ad6bf4a9 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Terminating instance [ 1477.142891] env[62510]: DEBUG nova.network.neutron [None req-1b97f493-e92b-41b5-8eda-51d305b27214 tempest-ServerExternalEventsTest-860687504 tempest-ServerExternalEventsTest-860687504-project] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Updating instance_info_cache with network_info: [{"id": "74d14948-d303-4a00-ad07-26f85011c249", "address": "fa:16:3e:9f:48:35", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.143", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": 
"06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap74d14948-d3", "ovs_interfaceid": "74d14948-d303-4a00-ad07-26f85011c249", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1477.145351] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "refresh_cache-0029d975-bd48-4558-9f41-a0cf91336393" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1477.145492] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquired lock "refresh_cache-0029d975-bd48-4558-9f41-a0cf91336393" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1477.145627] env[62510]: DEBUG nova.network.neutron [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1477.435152] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.699s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1477.438020] env[62510]: DEBUG nova.compute.manager [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1477.443543] env[62510]: DEBUG oslo_concurrency.lockutils [None req-77c2addd-0259-4f24-8f69-727d7125e8f5 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 25.046s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1477.503629] env[62510]: DEBUG oslo_vmware.api [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]522d514e-270e-53bf-c860-8874e37ae02e, 'name': SearchDatastore_Task, 'duration_secs': 0.011054} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.503817] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1477.504685] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] b7c2c768-573b-4c1c-ade7-45fb87b95d41/b7c2c768-573b-4c1c-ade7-45fb87b95d41.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1477.504685] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-117f84c1-d76a-4efb-94b9-b78bf7a443df {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.513630] env[62510]: DEBUG oslo_vmware.api [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Waiting for the task: (returnval){ [ 1477.513630] env[62510]: value = "task-1768413" [ 1477.513630] env[62510]: _type = "Task" [ 1477.513630] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.520452] env[62510]: DEBUG nova.compute.manager [req-e004ebb3-24c9-42cf-8363-f96273b7888a req-b992cc84-4562-410f-a9b1-23a9af9f5cd2 service nova] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Received event network-vif-plugged-a003d1ad-b7fa-4edc-a654-9a89e9533cbd {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1477.520649] env[62510]: DEBUG oslo_concurrency.lockutils [req-e004ebb3-24c9-42cf-8363-f96273b7888a req-b992cc84-4562-410f-a9b1-23a9af9f5cd2 service nova] Acquiring lock "0029d975-bd48-4558-9f41-a0cf91336393-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1477.520847] env[62510]: DEBUG oslo_concurrency.lockutils [req-e004ebb3-24c9-42cf-8363-f96273b7888a req-b992cc84-4562-410f-a9b1-23a9af9f5cd2 service nova] Lock "0029d975-bd48-4558-9f41-a0cf91336393-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1477.521019] env[62510]: DEBUG oslo_concurrency.lockutils [req-e004ebb3-24c9-42cf-8363-f96273b7888a req-b992cc84-4562-410f-a9b1-23a9af9f5cd2 service nova] Lock "0029d975-bd48-4558-9f41-a0cf91336393-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1477.521190] env[62510]: DEBUG nova.compute.manager [req-e004ebb3-24c9-42cf-8363-f96273b7888a req-b992cc84-4562-410f-a9b1-23a9af9f5cd2 service nova] 
[instance: 0029d975-bd48-4558-9f41-a0cf91336393] No waiting events found dispatching network-vif-plugged-a003d1ad-b7fa-4edc-a654-9a89e9533cbd {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1477.521350] env[62510]: WARNING nova.compute.manager [req-e004ebb3-24c9-42cf-8363-f96273b7888a req-b992cc84-4562-410f-a9b1-23a9af9f5cd2 service nova] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Received unexpected event network-vif-plugged-a003d1ad-b7fa-4edc-a654-9a89e9533cbd for instance with vm_state building and task_state spawning. [ 1477.521505] env[62510]: DEBUG nova.compute.manager [req-e004ebb3-24c9-42cf-8363-f96273b7888a req-b992cc84-4562-410f-a9b1-23a9af9f5cd2 service nova] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Received event network-changed-a003d1ad-b7fa-4edc-a654-9a89e9533cbd {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1477.521653] env[62510]: DEBUG nova.compute.manager [req-e004ebb3-24c9-42cf-8363-f96273b7888a req-b992cc84-4562-410f-a9b1-23a9af9f5cd2 service nova] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Refreshing instance network info cache due to event network-changed-a003d1ad-b7fa-4edc-a654-9a89e9533cbd. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1477.521811] env[62510]: DEBUG oslo_concurrency.lockutils [req-e004ebb3-24c9-42cf-8363-f96273b7888a req-b992cc84-4562-410f-a9b1-23a9af9f5cd2 service nova] Acquiring lock "refresh_cache-0029d975-bd48-4558-9f41-a0cf91336393" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1477.527997] env[62510]: DEBUG oslo_vmware.api [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768413, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.612708] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e4681a9f-550c-4445-939b-4c642c55d9f1 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Acquiring lock "0a940fd0-73cc-403d-9afc-a989c67dfdef" by "nova.compute.manager.ComputeManager.reboot_instance.<locals>.do_reboot_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1477.612708] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e4681a9f-550c-4445-939b-4c642c55d9f1 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Lock "0a940fd0-73cc-403d-9afc-a989c67dfdef" acquired by "nova.compute.manager.ComputeManager.reboot_instance.<locals>.do_reboot_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1477.612708] env[62510]: INFO nova.compute.manager [None req-e4681a9f-550c-4445-939b-4c642c55d9f1 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Rebooting instance [ 1477.620107] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1477.627825] env[62510]: DEBUG nova.compute.manager [None req-91bfa699-17df-4e55-8e2a-7936ad6bf4a9 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Start destroying the instance on the hypervisor.
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1477.628051] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-91bfa699-17df-4e55-8e2a-7936ad6bf4a9 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1477.630351] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de3abc23-33df-445f-b8f7-73b1490b541d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.641245] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-91bfa699-17df-4e55-8e2a-7936ad6bf4a9 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1477.641749] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fefea56c-3a5a-41bf-996b-3a0494b56858 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.647703] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1b97f493-e92b-41b5-8eda-51d305b27214 tempest-ServerExternalEventsTest-860687504 tempest-ServerExternalEventsTest-860687504-project] Releasing lock "refresh_cache-d42295c9-2b0e-471e-9a87-1d7367de9588" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1477.648242] env[62510]: DEBUG oslo_vmware.api [None req-91bfa699-17df-4e55-8e2a-7936ad6bf4a9 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Waiting for the task: (returnval){ [ 1477.648242] env[62510]: value = "task-1768414" [ 1477.648242] env[62510]: _type = "Task" [ 1477.648242] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.661787] env[62510]: DEBUG oslo_vmware.api [None req-91bfa699-17df-4e55-8e2a-7936ad6bf4a9 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Task: {'id': task-1768414, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.703287] env[62510]: DEBUG nova.network.neutron [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Instance cache missing network info. 
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1477.884907] env[62510]: DEBUG nova.network.neutron [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Updating instance_info_cache with network_info: [{"id": "a003d1ad-b7fa-4edc-a654-9a89e9533cbd", "address": "fa:16:3e:b2:5f:4c", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa003d1ad-b7", "ovs_interfaceid": "a003d1ad-b7fa-4edc-a654-9a89e9533cbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1477.949626] env[62510]: DEBUG nova.compute.utils [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1477.951169] env[62510]: DEBUG nova.compute.manager [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1477.951392] env[62510]: DEBUG nova.network.neutron [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1478.023541] env[62510]: DEBUG nova.policy [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ef61a999fed843dab89b52878d27e52e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '00aa62de97a24e57890fd14f412e0244', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1478.029901] env[62510]: DEBUG oslo_vmware.api [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768413, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.448549} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.030181] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] b7c2c768-573b-4c1c-ade7-45fb87b95d41/b7c2c768-573b-4c1c-ade7-45fb87b95d41.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1478.030399] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1478.030656] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0f0c1c56-97a0-410a-ab91-748a27c73b91 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.038695] env[62510]: DEBUG oslo_vmware.api [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Waiting for the task: (returnval){ [ 1478.038695] env[62510]: value = "task-1768415" [ 1478.038695] env[62510]: _type = "Task" [ 1478.038695] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.051300] env[62510]: DEBUG oslo_vmware.api [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768415, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.155741] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e4681a9f-550c-4445-939b-4c642c55d9f1 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Acquiring lock "refresh_cache-0a940fd0-73cc-403d-9afc-a989c67dfdef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1478.156597] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e4681a9f-550c-4445-939b-4c642c55d9f1 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Acquired lock "refresh_cache-0a940fd0-73cc-403d-9afc-a989c67dfdef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1478.157080] env[62510]: DEBUG nova.network.neutron [None req-e4681a9f-550c-4445-939b-4c642c55d9f1 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1478.169850] env[62510]: DEBUG oslo_vmware.api [None req-91bfa699-17df-4e55-8e2a-7936ad6bf4a9 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Task: {'id': task-1768414, 'name': PowerOffVM_Task, 'duration_secs': 0.32476} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.173132] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-91bfa699-17df-4e55-8e2a-7936ad6bf4a9 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1478.173335] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-91bfa699-17df-4e55-8e2a-7936ad6bf4a9 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1478.174505] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-27c6386f-f8fa-4b74-b42f-1c943a10fde6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.248942] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-91bfa699-17df-4e55-8e2a-7936ad6bf4a9 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1478.249246] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-91bfa699-17df-4e55-8e2a-7936ad6bf4a9 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1478.249778] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-91bfa699-17df-4e55-8e2a-7936ad6bf4a9 tempest-ServerExternalEventsTest-1380506733 
tempest-ServerExternalEventsTest-1380506733-project-member] Deleting the datastore file [datastore1] d42295c9-2b0e-471e-9a87-1d7367de9588 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1478.250113] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4f31a159-3d9f-47c5-a718-c1afbb6f08a8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.257540] env[62510]: DEBUG oslo_vmware.api [None req-91bfa699-17df-4e55-8e2a-7936ad6bf4a9 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Waiting for the task: (returnval){ [ 1478.257540] env[62510]: value = "task-1768417" [ 1478.257540] env[62510]: _type = "Task" [ 1478.257540] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.275301] env[62510]: DEBUG oslo_vmware.api [None req-91bfa699-17df-4e55-8e2a-7936ad6bf4a9 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Task: {'id': task-1768417, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.328198] env[62510]: DEBUG nova.network.neutron [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Successfully created port: fac4d91c-6432-4063-8e7b-93f076611d87 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1478.386204] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Releasing lock "refresh_cache-0029d975-bd48-4558-9f41-a0cf91336393" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1478.386523] env[62510]: DEBUG nova.compute.manager [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Instance network_info: |[{"id": "a003d1ad-b7fa-4edc-a654-9a89e9533cbd", "address": "fa:16:3e:b2:5f:4c", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa003d1ad-b7", "ovs_interfaceid": "a003d1ad-b7fa-4edc-a654-9a89e9533cbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1478.386826] env[62510]: DEBUG oslo_concurrency.lockutils [req-e004ebb3-24c9-42cf-8363-f96273b7888a req-b992cc84-4562-410f-a9b1-23a9af9f5cd2 service nova] Acquired lock "refresh_cache-0029d975-bd48-4558-9f41-a0cf91336393" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1478.386998] env[62510]: DEBUG nova.network.neutron [req-e004ebb3-24c9-42cf-8363-f96273b7888a req-b992cc84-4562-410f-a9b1-23a9af9f5cd2 service nova] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Refreshing network info cache for port a003d1ad-b7fa-4edc-a654-9a89e9533cbd {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1478.388528] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b2:5f:4c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2e0cfc48-d93b-4477-8082-69a2f7aa7701', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a003d1ad-b7fa-4edc-a654-9a89e9533cbd', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1478.400934] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Creating folder: Project (cca414b18f8d431786c155d359f1325d). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1478.404786] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c4d5179b-ef1f-4482-bc8f-10c5a02fbb89 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.415829] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Created folder: Project (cca414b18f8d431786c155d359f1325d) in parent group-v367197. [ 1478.416291] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Creating folder: Instances. Parent ref: group-v367266. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1478.418683] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7ac59e00-1e8e-458e-90c9-e49d653c7394 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.428441] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Created folder: Instances in parent group-v367266. [ 1478.428687] env[62510]: DEBUG oslo.service.loopingcall [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1478.428894] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1478.429121] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d630d0b7-f937-40c0-b660-abe7ca0b1117 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.456273] env[62510]: DEBUG nova.compute.manager [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1478.460033] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1478.460033] env[62510]: value = "task-1768420" [ 1478.460033] env[62510]: _type = "Task" [ 1478.460033] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.476386] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768420, 'name': CreateVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.557897] env[62510]: DEBUG oslo_vmware.api [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768415, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.099627} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.558584] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1478.559430] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3868c666-2293-43fd-8c4d-a84e3f2e2501 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.586064] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Reconfiguring VM instance instance-00000019 to attach disk [datastore1] b7c2c768-573b-4c1c-ade7-45fb87b95d41/b7c2c768-573b-4c1c-ade7-45fb87b95d41.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1478.587189] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dbbe2931-bea3-4836-8fc6-01a730979486 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.611616] env[62510]: DEBUG oslo_vmware.api [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Waiting for the task: (returnval){ [ 1478.611616] env[62510]: value = "task-1768421" [ 1478.611616] env[62510]: _type = "Task" [ 1478.611616] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.623190] env[62510]: DEBUG oslo_vmware.api [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768421, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.742784] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d27703aa-7f3b-45c4-af55-b1a479bf2f60 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.754951] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42a04f88-9c1c-4cd4-986f-97e81a9d286a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.767858] env[62510]: DEBUG oslo_vmware.api [None req-91bfa699-17df-4e55-8e2a-7936ad6bf4a9 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Task: {'id': task-1768417, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149307} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.798447] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-91bfa699-17df-4e55-8e2a-7936ad6bf4a9 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1478.798707] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-91bfa699-17df-4e55-8e2a-7936ad6bf4a9 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1478.798896] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-91bfa699-17df-4e55-8e2a-7936ad6bf4a9 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1478.799090] env[62510]: INFO nova.compute.manager [None req-91bfa699-17df-4e55-8e2a-7936ad6bf4a9 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1478.799356] env[62510]: DEBUG oslo.service.loopingcall [None req-91bfa699-17df-4e55-8e2a-7936ad6bf4a9 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1478.802948] env[62510]: DEBUG nova.compute.manager [-] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1478.803222] env[62510]: DEBUG nova.network.neutron [-] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1478.805166] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42ab94d2-88df-4f69-a52f-1612e75bd229 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.814342] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70f57f79-6604-4be8-86e2-bcf07a6a6c37 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.829485] env[62510]: DEBUG nova.compute.provider_tree [None req-77c2addd-0259-4f24-8f69-727d7125e8f5 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1478.966938] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768420, 'name': CreateVM_Task, 'duration_secs': 0.361628} completed successfully.
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.967128] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1478.967842] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1478.968021] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1478.968415] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1478.971896] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b583547-94fe-4513-b87e-c4bc3d1817a0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.977417] env[62510]: DEBUG oslo_vmware.api [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1478.977417] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]529139a8-5bef-3606-ec00-80b0c911fd2a" [ 1478.977417] env[62510]: _type = "Task" [ 1478.977417] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.985808] env[62510]: DEBUG oslo_vmware.api [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]529139a8-5bef-3606-ec00-80b0c911fd2a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.122104] env[62510]: DEBUG oslo_vmware.api [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768421, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.335196] env[62510]: DEBUG nova.scheduler.client.report [None req-77c2addd-0259-4f24-8f69-727d7125e8f5 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1479.395506] env[62510]: DEBUG nova.network.neutron [None req-e4681a9f-550c-4445-939b-4c642c55d9f1 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Updating instance_info_cache with network_info: [{"id": "23a05e54-a02b-4cd8-8812-f13c57329785", "address": "fa:16:3e:08:b6:8a", "network": {"id": "44a1c2ba-fa0e-43cd-96c4-1adaa5cc1504", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1916477814-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b99476c2f77248e9a99d756b2bc12577", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7edb7c08-2fae-4df5-9ec6-5ccf06d7e337", "external-id": "nsx-vlan-transportzone-309", "segmentation_id": 309, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23a05e54-a0", "ovs_interfaceid": "23a05e54-a02b-4cd8-8812-f13c57329785", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1479.476331] env[62510]: DEBUG nova.compute.manager [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1479.493846] env[62510]: DEBUG oslo_vmware.api [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]529139a8-5bef-3606-ec00-80b0c911fd2a, 'name': SearchDatastore_Task, 'duration_secs': 0.031632} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.495119] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1479.495119] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1479.495119] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1479.495119] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1479.496992] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1479.497319] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-94e4232f-ab10-4738-9e32-d7f0c98b69db {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.508783] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1479.508783] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1479.508783] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a234f38-1425-4825-b5b8-f3a7a3247a67 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.515203] env[62510]: DEBUG nova.virt.hardware [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=<?>,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-12-11T19:20:21Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1479.515430] env[62510]: DEBUG nova.virt.hardware [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1479.515585] env[62510]: DEBUG nova.virt.hardware [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1479.515767] env[62510]: DEBUG nova.virt.hardware [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1479.515912] env[62510]: DEBUG nova.virt.hardware [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1479.516067] env[62510]: DEBUG nova.virt.hardware [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1479.516352] env[62510]: DEBUG nova.virt.hardware [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1479.516436] env[62510]: DEBUG nova.virt.hardware [None
req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1479.516578] env[62510]: DEBUG nova.virt.hardware [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1479.516781] env[62510]: DEBUG nova.virt.hardware [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1479.517098] env[62510]: DEBUG nova.virt.hardware [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1479.518491] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f22320bf-3913-456f-8728-cd6bcaea3905 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.522031] env[62510]: DEBUG oslo_vmware.api [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1479.522031] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52f3986a-6a5f-3e55-7e79-99d8dd37110e" [ 1479.522031] env[62510]: _type = "Task" [ 1479.522031] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.528559] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-645957ab-dc3e-4d51-8327-d4df02caffed {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.534979] env[62510]: DEBUG oslo_vmware.api [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52f3986a-6a5f-3e55-7e79-99d8dd37110e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.578902] env[62510]: DEBUG nova.network.neutron [req-e004ebb3-24c9-42cf-8363-f96273b7888a req-b992cc84-4562-410f-a9b1-23a9af9f5cd2 service nova] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Updated VIF entry in instance network info cache for port a003d1ad-b7fa-4edc-a654-9a89e9533cbd. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1479.579275] env[62510]: DEBUG nova.network.neutron [req-e004ebb3-24c9-42cf-8363-f96273b7888a req-b992cc84-4562-410f-a9b1-23a9af9f5cd2 service nova] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Updating instance_info_cache with network_info: [{"id": "a003d1ad-b7fa-4edc-a654-9a89e9533cbd", "address": "fa:16:3e:b2:5f:4c", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa003d1ad-b7", "ovs_interfaceid": "a003d1ad-b7fa-4edc-a654-9a89e9533cbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1479.593921] env[62510]: DEBUG nova.network.neutron [-] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1479.621804] env[62510]: DEBUG oslo_vmware.api [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768421, 'name': ReconfigVM_Task, 'duration_secs': 0.616169} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.622097] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Reconfigured VM instance instance-00000019 to attach disk [datastore1] b7c2c768-573b-4c1c-ade7-45fb87b95d41/b7c2c768-573b-4c1c-ade7-45fb87b95d41.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1479.622708] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9d48cf1e-0282-45d0-a74d-232a93e11a6f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.629487] env[62510]: DEBUG oslo_vmware.api [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Waiting for the task: (returnval){ [ 1479.629487] env[62510]: value = "task-1768426" [ 1479.629487] env[62510]: _type = "Task" [ 1479.629487] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.638260] env[62510]: DEBUG oslo_vmware.api [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768426, 'name': Rename_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.694182] env[62510]: DEBUG nova.compute.manager [req-8b9bc1ed-70b5-4c93-a26c-e257a5457507 req-03312dcf-7374-44d1-a68d-b49f7fee19ad service nova] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Received event network-vif-deleted-74d14948-d303-4a00-ad07-26f85011c249 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1479.899132] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e4681a9f-550c-4445-939b-4c642c55d9f1 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Releasing lock "refresh_cache-0a940fd0-73cc-403d-9afc-a989c67dfdef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1479.945966] env[62510]: DEBUG nova.network.neutron [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Successfully updated port: fac4d91c-6432-4063-8e7b-93f076611d87 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1480.033293] env[62510]: DEBUG oslo_vmware.api [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52f3986a-6a5f-3e55-7e79-99d8dd37110e, 'name': SearchDatastore_Task, 'duration_secs': 0.012211} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.034133] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30dfea7a-4726-416e-b93d-c47de3e351d7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.039289] env[62510]: DEBUG oslo_vmware.api [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1480.039289] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52910baa-9311-556c-b075-bea1d39b3460" [ 1480.039289] env[62510]: _type = "Task" [ 1480.039289] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.046982] env[62510]: DEBUG oslo_vmware.api [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52910baa-9311-556c-b075-bea1d39b3460, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.081900] env[62510]: DEBUG oslo_concurrency.lockutils [req-e004ebb3-24c9-42cf-8363-f96273b7888a req-b992cc84-4562-410f-a9b1-23a9af9f5cd2 service nova] Releasing lock "refresh_cache-0029d975-bd48-4558-9f41-a0cf91336393" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1480.097623] env[62510]: INFO nova.compute.manager [-] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Took 1.29 seconds to deallocate network for instance. [ 1480.140252] env[62510]: DEBUG oslo_vmware.api [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768426, 'name': Rename_Task, 'duration_secs': 0.1497} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.140582] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1480.140823] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ee7015a4-47c6-4f24-8826-0483f31eab53 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.146995] env[62510]: DEBUG oslo_vmware.api [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Waiting for the task: (returnval){ [ 1480.146995] env[62510]: value = "task-1768427" [ 1480.146995] env[62510]: _type = "Task" [ 1480.146995] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.154526] env[62510]: DEBUG oslo_vmware.api [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768427, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.347101] env[62510]: DEBUG oslo_concurrency.lockutils [None req-77c2addd-0259-4f24-8f69-727d7125e8f5 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.908s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1480.350126] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.791s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1480.351587] env[62510]: INFO nova.compute.claims [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1480.407522] env[62510]: DEBUG nova.compute.manager [None req-e4681a9f-550c-4445-939b-4c642c55d9f1 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1480.408502] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc82469e-5ebd-4397-9458-e0bfc87308ab {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.448571] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Acquiring lock "refresh_cache-4f9bfb02-8aea-45a9-85ea-97e70f0d41fb" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1480.448732] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Acquired lock "refresh_cache-4f9bfb02-8aea-45a9-85ea-97e70f0d41fb" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1480.448885] env[62510]: DEBUG nova.network.neutron [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1480.550775] env[62510]: DEBUG oslo_vmware.api [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52910baa-9311-556c-b075-bea1d39b3460, 'name': SearchDatastore_Task, 'duration_secs': 0.008406} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.551149] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1480.551545] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 0029d975-bd48-4558-9f41-a0cf91336393/0029d975-bd48-4558-9f41-a0cf91336393.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1480.551925] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b942c24e-5606-439b-925f-84fc5f959979 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.559554] env[62510]: DEBUG oslo_vmware.api [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1480.559554] env[62510]: value = "task-1768428" [ 1480.559554] env[62510]: _type = "Task" [ 1480.559554] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.570039] env[62510]: DEBUG oslo_vmware.api [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768428, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.604582] env[62510]: DEBUG oslo_concurrency.lockutils [None req-91bfa699-17df-4e55-8e2a-7936ad6bf4a9 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1480.657558] env[62510]: DEBUG oslo_vmware.api [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768427, 'name': PowerOnVM_Task, 'duration_secs': 0.451257} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.657662] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1480.657793] env[62510]: INFO nova.compute.manager [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Took 7.66 seconds to spawn the instance on the hypervisor. [ 1480.657972] env[62510]: DEBUG nova.compute.manager [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1480.659097] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a5ccfff-0332-4433-8cf3-e28bf7b9df13 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.919476] env[62510]: INFO nova.scheduler.client.report [None req-77c2addd-0259-4f24-8f69-727d7125e8f5 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Deleted allocation for migration a3d89ea5-941d-4795-af70-8061e49c8be5 [ 1481.001732] env[62510]: DEBUG nova.network.neutron [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1481.073041] env[62510]: DEBUG oslo_vmware.api [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768428, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.184182] env[62510]: INFO nova.compute.manager [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Took 40.09 seconds to build instance. 
[ 1481.237706] env[62510]: DEBUG nova.network.neutron [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Updating instance_info_cache with network_info: [{"id": "fac4d91c-6432-4063-8e7b-93f076611d87", "address": "fa:16:3e:c3:30:55", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.101", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfac4d91c-64", "ovs_interfaceid": "fac4d91c-6432-4063-8e7b-93f076611d87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1481.429920] env[62510]: DEBUG oslo_concurrency.lockutils [None req-77c2addd-0259-4f24-8f69-727d7125e8f5 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Lock "12768001-6ed0-47be-8f20-c59ee82b842a" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 32.159s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1481.433110] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9e3a113-4ce9-4f37-8fca-eb2a8bd776df {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.441265] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e4681a9f-550c-4445-939b-4c642c55d9f1 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Doing hard reboot of VM {{(pid=62510) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1481.443760] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-d3dcf55d-3af1-4e0f-a6a1-eec535b3da77 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.451864] env[62510]: DEBUG oslo_vmware.api [None req-e4681a9f-550c-4445-939b-4c642c55d9f1 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Waiting for the task: (returnval){ [ 1481.451864] env[62510]: value = "task-1768429" [ 1481.451864] env[62510]: _type = "Task" [ 1481.451864] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.459832] env[62510]: DEBUG oslo_vmware.api [None req-e4681a9f-550c-4445-939b-4c642c55d9f1 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Task: {'id': task-1768429, 'name': ResetVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.573059] env[62510]: DEBUG oslo_vmware.api [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768428, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.579322} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.573355] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 0029d975-bd48-4558-9f41-a0cf91336393/0029d975-bd48-4558-9f41-a0cf91336393.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1481.573575] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1481.573825] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2efe7608-a26f-4fe9-8fab-c173e05b9302 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.582976] env[62510]: DEBUG oslo_vmware.api [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1481.582976] env[62510]: value = "task-1768430" [ 1481.582976] env[62510]: _type = "Task" [ 1481.582976] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.590785] env[62510]: DEBUG oslo_vmware.api [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768430, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.685696] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b0260b3-07ca-4e27-aebc-13dcc82dd6f7 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Lock "b7c2c768-573b-4c1c-ade7-45fb87b95d41" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.154s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1481.731557] env[62510]: DEBUG nova.compute.manager [req-5f06ac90-3051-4414-b4e6-e079b5b2dd35 req-1c7d6af2-1e6c-4b46-aa90-52fa74b420a7 service nova] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Received event network-vif-plugged-fac4d91c-6432-4063-8e7b-93f076611d87 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1481.732616] env[62510]: DEBUG oslo_concurrency.lockutils [req-5f06ac90-3051-4414-b4e6-e079b5b2dd35 req-1c7d6af2-1e6c-4b46-aa90-52fa74b420a7 service nova] Acquiring lock "4f9bfb02-8aea-45a9-85ea-97e70f0d41fb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1481.732616] env[62510]: DEBUG oslo_concurrency.lockutils [req-5f06ac90-3051-4414-b4e6-e079b5b2dd35 req-1c7d6af2-1e6c-4b46-aa90-52fa74b420a7 service nova] Lock "4f9bfb02-8aea-45a9-85ea-97e70f0d41fb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1481.732616] env[62510]: DEBUG oslo_concurrency.lockutils [req-5f06ac90-3051-4414-b4e6-e079b5b2dd35 req-1c7d6af2-1e6c-4b46-aa90-52fa74b420a7 service nova] Lock "4f9bfb02-8aea-45a9-85ea-97e70f0d41fb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1481.732616] env[62510]: DEBUG nova.compute.manager [req-5f06ac90-3051-4414-b4e6-e079b5b2dd35 req-1c7d6af2-1e6c-4b46-aa90-52fa74b420a7 service nova] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] No waiting events found dispatching network-vif-plugged-fac4d91c-6432-4063-8e7b-93f076611d87 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1481.732616] env[62510]: WARNING nova.compute.manager [req-5f06ac90-3051-4414-b4e6-e079b5b2dd35 req-1c7d6af2-1e6c-4b46-aa90-52fa74b420a7 service nova] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Received unexpected event network-vif-plugged-fac4d91c-6432-4063-8e7b-93f076611d87 for instance with vm_state building and task_state spawning. [ 1481.732776] env[62510]: DEBUG nova.compute.manager [req-5f06ac90-3051-4414-b4e6-e079b5b2dd35 req-1c7d6af2-1e6c-4b46-aa90-52fa74b420a7 service nova] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Received event network-changed-fac4d91c-6432-4063-8e7b-93f076611d87 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1481.732821] env[62510]: DEBUG nova.compute.manager [req-5f06ac90-3051-4414-b4e6-e079b5b2dd35 req-1c7d6af2-1e6c-4b46-aa90-52fa74b420a7 service nova] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Refreshing instance network info cache due to event network-changed-fac4d91c-6432-4063-8e7b-93f076611d87. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1481.732974] env[62510]: DEBUG oslo_concurrency.lockutils [req-5f06ac90-3051-4414-b4e6-e079b5b2dd35 req-1c7d6af2-1e6c-4b46-aa90-52fa74b420a7 service nova] Acquiring lock "refresh_cache-4f9bfb02-8aea-45a9-85ea-97e70f0d41fb" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1481.740669] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Releasing lock "refresh_cache-4f9bfb02-8aea-45a9-85ea-97e70f0d41fb" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1481.740969] env[62510]: DEBUG nova.compute.manager [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Instance network_info: |[{"id": "fac4d91c-6432-4063-8e7b-93f076611d87", "address": "fa:16:3e:c3:30:55", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.101", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfac4d91c-64", "ovs_interfaceid": "fac4d91c-6432-4063-8e7b-93f076611d87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1481.741529] env[62510]: DEBUG oslo_concurrency.lockutils [req-5f06ac90-3051-4414-b4e6-e079b5b2dd35 req-1c7d6af2-1e6c-4b46-aa90-52fa74b420a7 service nova] Acquired lock "refresh_cache-4f9bfb02-8aea-45a9-85ea-97e70f0d41fb" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1481.741721] env[62510]: DEBUG nova.network.neutron [req-5f06ac90-3051-4414-b4e6-e079b5b2dd35 req-1c7d6af2-1e6c-4b46-aa90-52fa74b420a7 service nova] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Refreshing network info cache for port fac4d91c-6432-4063-8e7b-93f076611d87 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1481.743366] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c3:30:55', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '28d04eee-6dbb-491a-a999-b659c799679d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fac4d91c-6432-4063-8e7b-93f076611d87', 'vif_model': 
'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1481.752349] env[62510]: DEBUG oslo.service.loopingcall [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1481.756700] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1481.757667] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cd203014-ac9d-4006-bf02-c4e0c37b3527 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.780953] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1481.780953] env[62510]: value = "task-1768431" [ 1481.780953] env[62510]: _type = "Task" [ 1481.780953] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.790901] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768431, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.957794] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-280754a0-a1c6-4908-a286-7cbd3407a879 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.967905] env[62510]: DEBUG oslo_vmware.api [None req-e4681a9f-550c-4445-939b-4c642c55d9f1 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Task: {'id': task-1768429, 'name': ResetVM_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.968804] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7a52102-9081-493a-84ff-fa5e81ab51f1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.000104] env[62510]: DEBUG nova.network.neutron [req-5f06ac90-3051-4414-b4e6-e079b5b2dd35 req-1c7d6af2-1e6c-4b46-aa90-52fa74b420a7 service nova] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Updated VIF entry in instance network info cache for port fac4d91c-6432-4063-8e7b-93f076611d87. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1482.000661] env[62510]: DEBUG nova.network.neutron [req-5f06ac90-3051-4414-b4e6-e079b5b2dd35 req-1c7d6af2-1e6c-4b46-aa90-52fa74b420a7 service nova] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Updating instance_info_cache with network_info: [{"id": "fac4d91c-6432-4063-8e7b-93f076611d87", "address": "fa:16:3e:c3:30:55", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.101", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfac4d91c-64", "ovs_interfaceid": "fac4d91c-6432-4063-8e7b-93f076611d87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1482.002110] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24609d02-e48e-493a-a0d0-75d733918b51 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.010250] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1555d491-3a5a-4b71-97d5-028ee3d759d3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.025602] env[62510]: DEBUG nova.compute.provider_tree [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1482.093889] env[62510]: DEBUG oslo_vmware.api [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768430, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.169946} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1482.094220] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1482.099045] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9f271fe-edc2-4b5f-974e-841a899ce386 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.118647] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Reconfiguring VM instance instance-0000001a to attach disk [datastore1] 0029d975-bd48-4558-9f41-a0cf91336393/0029d975-bd48-4558-9f41-a0cf91336393.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1482.118945] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b290132c-5a45-4b8f-94ac-73832d8aa598 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.142850] env[62510]: DEBUG oslo_vmware.api [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1482.142850] env[62510]: value = "task-1768432" [ 1482.142850] env[62510]: _type = "Task" [ 1482.142850] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1482.155498] env[62510]: DEBUG oslo_vmware.api [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768432, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.189350] env[62510]: DEBUG nova.compute.manager [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1482.292297] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768431, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.462631] env[62510]: DEBUG oslo_vmware.api [None req-e4681a9f-550c-4445-939b-4c642c55d9f1 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Task: {'id': task-1768429, 'name': ResetVM_Task} progress is 100%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.505868] env[62510]: DEBUG oslo_concurrency.lockutils [req-5f06ac90-3051-4414-b4e6-e079b5b2dd35 req-1c7d6af2-1e6c-4b46-aa90-52fa74b420a7 service nova] Releasing lock "refresh_cache-4f9bfb02-8aea-45a9-85ea-97e70f0d41fb" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1482.530144] env[62510]: DEBUG nova.scheduler.client.report [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1482.654440] env[62510]: DEBUG oslo_vmware.api [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768432, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.708014] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1482.792060] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768431, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.883153] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquiring lock "fae7e580-ab09-4fda-9cbe-0e066ddcb85c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1482.883392] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Lock "fae7e580-ab09-4fda-9cbe-0e066ddcb85c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1482.964629] env[62510]: DEBUG oslo_vmware.api [None req-e4681a9f-550c-4445-939b-4c642c55d9f1 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Task: {'id': task-1768429, 'name': ResetVM_Task, 'duration_secs': 1.106148} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1482.964937] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e4681a9f-550c-4445-939b-4c642c55d9f1 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Did hard reboot of VM {{(pid=62510) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1482.965161] env[62510]: DEBUG nova.compute.manager [None req-e4681a9f-550c-4445-939b-4c642c55d9f1 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1482.966022] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73510d4f-8300-4fab-aeb6-c294e134d57c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.037558] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.687s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1483.037669] env[62510]: DEBUG nova.compute.manager [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1483.042031] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 23.322s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1483.042031] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1483.042178] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62510) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1483.042519] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.965s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1483.044666] env[62510]: INFO nova.compute.claims [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1483.050790] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bd1b41a-6584-42c7-921b-b81a5e3ba7ec {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.060167] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdbadfce-ed4b-42a7-8f25-c8608993e90c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.075892] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49c4e5df-3628-495d-8239-a08f66c3c6a1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.084155] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-035ee270-de1f-4c62-8006-af4a0ca4899f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.115808] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180085MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=62510) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1483.115808] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" 
{{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1483.157185] env[62510]: DEBUG oslo_vmware.api [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768432, 'name': ReconfigVM_Task, 'duration_secs': 0.931038} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.157185] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Reconfigured VM instance instance-0000001a to attach disk [datastore1] 0029d975-bd48-4558-9f41-a0cf91336393/0029d975-bd48-4558-9f41-a0cf91336393.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1483.158020] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9cad8636-9ee9-485c-a669-7292387b9a7f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.165652] env[62510]: DEBUG oslo_vmware.api [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1483.165652] env[62510]: value = "task-1768434" [ 1483.165652] env[62510]: _type = "Task" [ 1483.165652] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.177119] env[62510]: DEBUG oslo_vmware.api [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768434, 'name': Rename_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.298858] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768431, 'name': CreateVM_Task, 'duration_secs': 1.356565} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.299098] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1483.300672] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1483.300672] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1483.300672] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1483.300836] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00dfe125-1eb0-41b5-a56b-0dccc1f9f327 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.306380] env[62510]: DEBUG oslo_vmware.api [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Waiting for the task: (returnval){ [ 1483.306380] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52f27c98-cd7f-0940-e119-a1b82314a5c2" [ 1483.306380] env[62510]: _type = "Task" [ 1483.306380] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.319326] env[62510]: DEBUG oslo_vmware.api [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52f27c98-cd7f-0940-e119-a1b82314a5c2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.481487] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e4681a9f-550c-4445-939b-4c642c55d9f1 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Lock "0a940fd0-73cc-403d-9afc-a989c67dfdef" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 5.870s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1483.543769] env[62510]: DEBUG nova.compute.utils [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1483.545223] env[62510]: DEBUG nova.compute.manager [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1483.549333] env[62510]: DEBUG nova.network.neutron [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1483.618061] env[62510]: DEBUG nova.policy [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4caa1ae9def141d39d03758768edc29e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3a623ac1deaf42f0af0bbd743c622316', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1483.676802] env[62510]: DEBUG oslo_vmware.api [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768434, 'name': Rename_Task} progress is 99%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.773285] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3c1fce38-0a93-4fe9-a054-8564bd67e248 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Acquiring lock "0a940fd0-73cc-403d-9afc-a989c67dfdef" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1483.773556] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3c1fce38-0a93-4fe9-a054-8564bd67e248 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Lock "0a940fd0-73cc-403d-9afc-a989c67dfdef" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1483.774834] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3c1fce38-0a93-4fe9-a054-8564bd67e248 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Acquiring lock "0a940fd0-73cc-403d-9afc-a989c67dfdef-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1483.774834] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3c1fce38-0a93-4fe9-a054-8564bd67e248 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Lock "0a940fd0-73cc-403d-9afc-a989c67dfdef-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1483.774834] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3c1fce38-0a93-4fe9-a054-8564bd67e248 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Lock "0a940fd0-73cc-403d-9afc-a989c67dfdef-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1483.776618] env[62510]: INFO nova.compute.manager [None req-3c1fce38-0a93-4fe9-a054-8564bd67e248 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Terminating instance [ 1483.817796] env[62510]: DEBUG oslo_vmware.api [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52f27c98-cd7f-0940-e119-a1b82314a5c2, 'name': SearchDatastore_Task, 'duration_secs': 0.012921} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.818127] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1483.818361] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1483.818673] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1483.818819] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1483.818996] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1483.819288] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-41a3f387-cf9a-4f48-88d1-c2bf545cc1d3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.831626] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1483.831820] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1483.832587] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-743204b5-6951-455a-8be3-0de9b135fd90 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.838278] env[62510]: DEBUG oslo_vmware.api [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Waiting for the task: (returnval){ [ 1483.838278] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]523f350b-aba9-162a-532c-ee087b3f002b" [ 1483.838278] env[62510]: _type = "Task" [ 1483.838278] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.845833] env[62510]: DEBUG oslo_vmware.api [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]523f350b-aba9-162a-532c-ee087b3f002b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.950848] env[62510]: DEBUG nova.network.neutron [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Successfully created port: 152cd402-ef01-4f90-a464-27b8e3ac0650 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1484.052871] env[62510]: DEBUG nova.compute.manager [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1484.180077] env[62510]: DEBUG oslo_vmware.api [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768434, 'name': Rename_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.282649] env[62510]: DEBUG nova.compute.manager [None req-3c1fce38-0a93-4fe9-a054-8564bd67e248 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1484.282921] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3c1fce38-0a93-4fe9-a054-8564bd67e248 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1484.286317] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c37bc0ae-fd2f-4a41-855f-b87915c5f8a2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.294210] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c1fce38-0a93-4fe9-a054-8564bd67e248 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1484.294518] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d01387a1-3344-444a-9694-93961bfbedf5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.304024] env[62510]: DEBUG oslo_vmware.api [None req-3c1fce38-0a93-4fe9-a054-8564bd67e248 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Waiting for the task: (returnval){ [ 1484.304024] env[62510]: value = "task-1768435" [ 1484.304024] env[62510]: _type = "Task" [ 1484.304024] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.312524] env[62510]: DEBUG oslo_vmware.api [None req-3c1fce38-0a93-4fe9-a054-8564bd67e248 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Task: {'id': task-1768435, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.349734] env[62510]: DEBUG oslo_vmware.api [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]523f350b-aba9-162a-532c-ee087b3f002b, 'name': SearchDatastore_Task, 'duration_secs': 0.056429} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.352536] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1767c491-19e5-4f96-a442-ef0c089f82bf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.357963] env[62510]: DEBUG oslo_vmware.api [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Waiting for the task: (returnval){ [ 1484.357963] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52e53517-20e0-a3ac-0ded-e8db6b010aa5" [ 1484.357963] env[62510]: _type = "Task" [ 1484.357963] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.366269] env[62510]: DEBUG oslo_vmware.api [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52e53517-20e0-a3ac-0ded-e8db6b010aa5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.620477] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-062e6b08-38b4-4f37-92c8-22e9e249933c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.629673] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42ede0af-42c4-4405-9c65-7cab31a6d9fa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.662883] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30ea2b6e-ad9a-4b31-9562-6852d8ac1471 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.673842] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89d0f895-a44e-4e51-a6f8-144ee5cb281d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.682420] env[62510]: DEBUG oslo_vmware.api [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768434, 'name': Rename_Task, 'duration_secs': 1.261451} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.691502] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1484.692081] env[62510]: DEBUG nova.compute.provider_tree [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1484.693323] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8fabb03c-6a48-4e66-94ad-37dced853aec {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.700605] env[62510]: DEBUG oslo_vmware.api [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1484.700605] env[62510]: value = "task-1768437" [ 1484.700605] env[62510]: _type = "Task" [ 1484.700605] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.709827] env[62510]: DEBUG oslo_vmware.api [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768437, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.814744] env[62510]: DEBUG oslo_vmware.api [None req-3c1fce38-0a93-4fe9-a054-8564bd67e248 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Task: {'id': task-1768435, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.868305] env[62510]: DEBUG oslo_vmware.api [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52e53517-20e0-a3ac-0ded-e8db6b010aa5, 'name': SearchDatastore_Task, 'duration_secs': 0.020582} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.868562] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1484.868826] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb/4f9bfb02-8aea-45a9-85ea-97e70f0d41fb.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1484.869104] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1211eea8-ae65-43e5-adb5-5251a72a3d07 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.876601] env[62510]: DEBUG oslo_vmware.api [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Waiting for the task: (returnval){ [ 1484.876601] env[62510]: value = "task-1768438" [ 1484.876601] env[62510]: _type = "Task" [ 1484.876601] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.885582] env[62510]: DEBUG oslo_vmware.api [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768438, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.063637] env[62510]: DEBUG nova.compute.manager [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1485.099672] env[62510]: DEBUG nova.virt.hardware [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1485.099861] env[62510]: DEBUG nova.virt.hardware [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1485.100023] env[62510]: DEBUG nova.virt.hardware [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1485.100067] env[62510]: DEBUG nova.virt.hardware [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1485.100221] env[62510]: DEBUG nova.virt.hardware [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1485.100368] env[62510]: DEBUG nova.virt.hardware [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1485.100810] env[62510]: DEBUG nova.virt.hardware [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1485.101022] env[62510]: DEBUG nova.virt.hardware [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1485.101204] env[62510]: DEBUG nova.virt.hardware [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1485.101371] env[62510]: DEBUG nova.virt.hardware [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1485.101545] env[62510]: DEBUG nova.virt.hardware [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1485.102478] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f4e0a71-33ea-4ec1-a269-ac73ca79d0e8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.111562] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7209d605-bbbc-4886-ac6a-de9428c2d7fc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.197233] env[62510]: DEBUG nova.scheduler.client.report [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1485.210950] env[62510]: DEBUG oslo_vmware.api [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768437, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.317869] env[62510]: DEBUG oslo_vmware.api [None req-3c1fce38-0a93-4fe9-a054-8564bd67e248 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Task: {'id': task-1768435, 'name': PowerOffVM_Task, 'duration_secs': 0.599963} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.318179] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c1fce38-0a93-4fe9-a054-8564bd67e248 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1485.318350] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3c1fce38-0a93-4fe9-a054-8564bd67e248 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1485.318680] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dab70545-2a6a-45ff-8c02-f28e36343f7f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.365818] env[62510]: DEBUG nova.compute.manager [req-3704fca7-4021-4018-96dd-0238c3a41cda req-ac6eb664-d477-48ec-9d87-5d054c597f41 service nova] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Received event network-vif-plugged-152cd402-ef01-4f90-a464-27b8e3ac0650 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1485.366061] env[62510]: DEBUG oslo_concurrency.lockutils [req-3704fca7-4021-4018-96dd-0238c3a41cda req-ac6eb664-d477-48ec-9d87-5d054c597f41 service nova] Acquiring lock "a040671e-941d-4406-81af-f2f7a4b690e4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1485.366279] env[62510]: DEBUG oslo_concurrency.lockutils [req-3704fca7-4021-4018-96dd-0238c3a41cda req-ac6eb664-d477-48ec-9d87-5d054c597f41 service nova] Lock "a040671e-941d-4406-81af-f2f7a4b690e4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1485.366450] env[62510]: DEBUG oslo_concurrency.lockutils [req-3704fca7-4021-4018-96dd-0238c3a41cda req-ac6eb664-d477-48ec-9d87-5d054c597f41 service nova] Lock "a040671e-941d-4406-81af-f2f7a4b690e4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1485.366620] env[62510]: DEBUG nova.compute.manager [req-3704fca7-4021-4018-96dd-0238c3a41cda req-ac6eb664-d477-48ec-9d87-5d054c597f41 service nova] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] No waiting events found dispatching network-vif-plugged-152cd402-ef01-4f90-a464-27b8e3ac0650 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1485.366796] env[62510]: WARNING nova.compute.manager [req-3704fca7-4021-4018-96dd-0238c3a41cda req-ac6eb664-d477-48ec-9d87-5d054c597f41 service nova] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Received unexpected event network-vif-plugged-152cd402-ef01-4f90-a464-27b8e3ac0650 for instance with vm_state building and task_state spawning. 
[ 1485.386653] env[62510]: DEBUG oslo_vmware.api [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768438, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.489626} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.386973] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb/4f9bfb02-8aea-45a9-85ea-97e70f0d41fb.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1485.387159] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1485.387408] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-153d6ab2-7f1e-4ab1-81fd-62d53117768a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.394447] env[62510]: DEBUG oslo_vmware.api [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Waiting for the task: (returnval){ [ 1485.394447] env[62510]: value = "task-1768440" [ 1485.394447] env[62510]: _type = "Task" [ 1485.394447] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.399670] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3c1fce38-0a93-4fe9-a054-8564bd67e248 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1485.399877] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3c1fce38-0a93-4fe9-a054-8564bd67e248 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1485.400090] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c1fce38-0a93-4fe9-a054-8564bd67e248 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Deleting the datastore file [datastore1] 0a940fd0-73cc-403d-9afc-a989c67dfdef {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1485.400767] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0a638f69-2f0e-4563-a2c1-6a5fb3a53769 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.406920] env[62510]: DEBUG oslo_vmware.api [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768440, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.411043] env[62510]: DEBUG oslo_vmware.api [None req-3c1fce38-0a93-4fe9-a054-8564bd67e248 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Waiting for the task: (returnval){ [ 1485.411043] env[62510]: value = "task-1768441" [ 1485.411043] env[62510]: _type = "Task" [ 1485.411043] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.419395] env[62510]: DEBUG oslo_vmware.api [None req-3c1fce38-0a93-4fe9-a054-8564bd67e248 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Task: {'id': task-1768441, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.461274] env[62510]: DEBUG nova.network.neutron [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Successfully updated port: 152cd402-ef01-4f90-a464-27b8e3ac0650 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1485.705820] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.663s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1485.706351] env[62510]: DEBUG nova.compute.manager [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1485.709358] env[62510]: DEBUG oslo_concurrency.lockutils [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.330s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1485.711940] env[62510]: INFO nova.compute.claims [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1485.724173] env[62510]: DEBUG oslo_vmware.api [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768437, 'name': PowerOnVM_Task} progress is 71%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.904266] env[62510]: DEBUG oslo_vmware.api [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768440, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066084} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.904624] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1485.905430] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-319b9828-e8b3-4a4a-9801-47a537223ca7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.929184] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Reconfiguring VM instance instance-0000001b to attach disk [datastore1] 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb/4f9bfb02-8aea-45a9-85ea-97e70f0d41fb.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1485.933457] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8f857dda-ec11-4aff-a8c0-eec27af8d178 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.952247] env[62510]: DEBUG oslo_vmware.api [None req-3c1fce38-0a93-4fe9-a054-8564bd67e248 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Task: {'id': task-1768441, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150669} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.953512] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c1fce38-0a93-4fe9-a054-8564bd67e248 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1485.953712] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3c1fce38-0a93-4fe9-a054-8564bd67e248 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1485.953892] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3c1fce38-0a93-4fe9-a054-8564bd67e248 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1485.954165] env[62510]: INFO nova.compute.manager [None req-3c1fce38-0a93-4fe9-a054-8564bd67e248 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Took 1.67 seconds to destroy the instance on the hypervisor. 
[ 1485.954325] env[62510]: DEBUG oslo.service.loopingcall [None req-3c1fce38-0a93-4fe9-a054-8564bd67e248 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1485.954602] env[62510]: DEBUG oslo_vmware.api [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Waiting for the task: (returnval){ [ 1485.954602] env[62510]: value = "task-1768442" [ 1485.954602] env[62510]: _type = "Task" [ 1485.954602] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.954795] env[62510]: DEBUG nova.compute.manager [-] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1485.954894] env[62510]: DEBUG nova.network.neutron [-] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1485.965610] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Acquiring lock "refresh_cache-a040671e-941d-4406-81af-f2f7a4b690e4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1485.965759] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Acquired lock "refresh_cache-a040671e-941d-4406-81af-f2f7a4b690e4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1485.965913] env[62510]: DEBUG nova.network.neutron [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1485.967069] env[62510]: DEBUG oslo_vmware.api [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768442, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.212288] env[62510]: DEBUG oslo_vmware.api [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768437, 'name': PowerOnVM_Task, 'duration_secs': 1.358921} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.212814] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1486.213026] env[62510]: INFO nova.compute.manager [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Took 10.47 seconds to spawn the instance on the hypervisor. [ 1486.213202] env[62510]: DEBUG nova.compute.manager [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1486.213992] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d9aef0-7ab6-46de-87b4-dae4cb72982a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.219640] env[62510]: DEBUG nova.compute.utils [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1486.221121] env[62510]: DEBUG nova.compute.manager [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1486.221290] env[62510]: DEBUG nova.network.neutron [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1486.287798] env[62510]: DEBUG nova.policy [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b3f66a682b5747bbba3de4f10442882d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e19e216f37fe490abb9e06fecee78320', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1486.466023] env[62510]: DEBUG oslo_vmware.api [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768442, 'name': ReconfigVM_Task, 'duration_secs': 0.325595} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.467030] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Reconfigured VM instance instance-0000001b to attach disk [datastore1] 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb/4f9bfb02-8aea-45a9-85ea-97e70f0d41fb.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1486.467150] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bc5fa676-0416-4261-bf0b-7c767b3da855 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.476215] env[62510]: DEBUG oslo_vmware.api [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Waiting for the task: (returnval){ [ 1486.476215] env[62510]: value = "task-1768444" [ 1486.476215] env[62510]: _type = "Task" [ 1486.476215] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.485552] env[62510]: DEBUG oslo_vmware.api [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768444, 'name': Rename_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.499934] env[62510]: DEBUG nova.network.neutron [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Instance cache missing network info. 
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1486.616775] env[62510]: DEBUG nova.network.neutron [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Successfully created port: 66e8c35a-f1f1-4dfc-94de-fd2781f02eae {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1486.698676] env[62510]: DEBUG nova.network.neutron [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Updating instance_info_cache with network_info: [{"id": "152cd402-ef01-4f90-a464-27b8e3ac0650", "address": "fa:16:3e:44:3e:e1", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.220", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap152cd402-ef", "ovs_interfaceid": "152cd402-ef01-4f90-a464-27b8e3ac0650", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1486.713345] env[62510]: DEBUG nova.network.neutron [-] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1486.725084] env[62510]: DEBUG nova.compute.manager [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1486.736286] env[62510]: INFO nova.compute.manager [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Took 38.59 seconds to build instance. [ 1486.985488] env[62510]: DEBUG oslo_vmware.api [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768444, 'name': Rename_Task, 'duration_secs': 0.16951} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.987868] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1486.988437] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cd33aa9b-06d5-41ad-8c98-78e35b8d85ae {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.994606] env[62510]: DEBUG oslo_vmware.api [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Waiting for the task: (returnval){ [ 1486.994606] env[62510]: value = "task-1768445" [ 1486.994606] env[62510]: _type = "Task" [ 1486.994606] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.003235] env[62510]: DEBUG oslo_vmware.api [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768445, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.201970] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Releasing lock "refresh_cache-a040671e-941d-4406-81af-f2f7a4b690e4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1487.202338] env[62510]: DEBUG nova.compute.manager [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Instance network_info: |[{"id": "152cd402-ef01-4f90-a464-27b8e3ac0650", "address": "fa:16:3e:44:3e:e1", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.220", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap152cd402-ef", "ovs_interfaceid": "152cd402-ef01-4f90-a464-27b8e3ac0650", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1487.202766] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None 
req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:44:3e:e1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '28d04eee-6dbb-491a-a999-b659c799679d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '152cd402-ef01-4f90-a464-27b8e3ac0650', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1487.210507] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Creating folder: Project (3a623ac1deaf42f0af0bbd743c622316). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1487.213115] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d1a2565c-6ec9-4d14-bb52-647d4cfaf353 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.215369] env[62510]: INFO nova.compute.manager [-] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Took 1.26 seconds to deallocate network for instance. [ 1487.224055] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Created folder: Project (3a623ac1deaf42f0af0bbd743c622316) in parent group-v367197. [ 1487.224260] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Creating folder: Instances. Parent ref: group-v367273. 
{{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1487.225286] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-91f7509d-6133-465e-8468-655e59a4546b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.227367] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a45c126-8d34-411d-94d1-5f7b5414abcf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.238721] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e011fec8-fe38-4602-b607-07b11553cb17 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.242402] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d33c0490-f98d-426e-8ca0-050d6c6e627e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "0029d975-bd48-4558-9f41-a0cf91336393" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.308s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1487.282030] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3f3ea1a-87ab-4973-b08f-ddeb4d39e46c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.284866] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Created folder: Instances in parent group-v367273. [ 1487.285215] env[62510]: DEBUG oslo.service.loopingcall [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1487.285495] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1487.286205] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-79767120-6507-44ac-8377-f96bd04375fb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.307761] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4fc5d05-9bd5-484c-9789-a51346daab52 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.317069] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1487.317069] env[62510]: value = "task-1768448" [ 1487.317069] env[62510]: _type = "Task" [ 1487.317069] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.329712] env[62510]: DEBUG nova.compute.provider_tree [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1487.338960] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768448, 'name': CreateVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.398318] env[62510]: DEBUG nova.compute.manager [req-f792bb5f-4150-44fc-8de0-a0d19d22526b req-4c38a059-d5f6-4182-bfe6-806c2bf92891 service nova] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Received event network-changed-152cd402-ef01-4f90-a464-27b8e3ac0650 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1487.398318] env[62510]: DEBUG nova.compute.manager [req-f792bb5f-4150-44fc-8de0-a0d19d22526b req-4c38a059-d5f6-4182-bfe6-806c2bf92891 service nova] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Refreshing instance network info cache due to event network-changed-152cd402-ef01-4f90-a464-27b8e3ac0650. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1487.398318] env[62510]: DEBUG oslo_concurrency.lockutils [req-f792bb5f-4150-44fc-8de0-a0d19d22526b req-4c38a059-d5f6-4182-bfe6-806c2bf92891 service nova] Acquiring lock "refresh_cache-a040671e-941d-4406-81af-f2f7a4b690e4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1487.398694] env[62510]: DEBUG oslo_concurrency.lockutils [req-f792bb5f-4150-44fc-8de0-a0d19d22526b req-4c38a059-d5f6-4182-bfe6-806c2bf92891 service nova] Acquired lock "refresh_cache-a040671e-941d-4406-81af-f2f7a4b690e4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1487.398694] env[62510]: DEBUG nova.network.neutron [req-f792bb5f-4150-44fc-8de0-a0d19d22526b req-4c38a059-d5f6-4182-bfe6-806c2bf92891 service nova] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Refreshing network info cache for port 152cd402-ef01-4f90-a464-27b8e3ac0650 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1487.506821] env[62510]: DEBUG oslo_vmware.api [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768445, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.721289] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3c1fce38-0a93-4fe9-a054-8564bd67e248 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1487.735787] env[62510]: DEBUG nova.compute.manager [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1487.745459] env[62510]: DEBUG nova.compute.manager [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1487.765049] env[62510]: DEBUG nova.virt.hardware [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1487.765304] env[62510]: DEBUG nova.virt.hardware [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1487.765467] env[62510]: DEBUG nova.virt.hardware [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1487.765654] env[62510]: DEBUG nova.virt.hardware [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1487.765791] env[62510]: DEBUG nova.virt.hardware [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1487.766418] env[62510]: DEBUG nova.virt.hardware [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1487.766712] env[62510]: DEBUG nova.virt.hardware [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1487.766913] env[62510]: DEBUG nova.virt.hardware 
[None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1487.767128] env[62510]: DEBUG nova.virt.hardware [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1487.767335] env[62510]: DEBUG nova.virt.hardware [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1487.767548] env[62510]: DEBUG nova.virt.hardware [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1487.768480] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-891fffdb-ded6-4eed-aa6e-ed2c96767112 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.776885] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bbb8bad-684d-4c6b-b64e-2ce6d08beaaa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.827849] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768448, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.834825] env[62510]: DEBUG nova.scheduler.client.report [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1488.005137] env[62510]: DEBUG oslo_vmware.api [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768445, 'name': PowerOnVM_Task, 'duration_secs': 0.872011} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.005471] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1488.005694] env[62510]: INFO nova.compute.manager [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Took 8.53 seconds to spawn the instance on the hypervisor. [ 1488.005880] env[62510]: DEBUG nova.compute.manager [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1488.006669] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22e80e1d-8a83-4818-9394-afa4c4794c83 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.123442] env[62510]: DEBUG nova.network.neutron [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Successfully updated port: 66e8c35a-f1f1-4dfc-94de-fd2781f02eae {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1488.271434] env[62510]: DEBUG oslo_concurrency.lockutils [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1488.330466] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768448, 'name': CreateVM_Task, 'duration_secs': 0.611534} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.330640] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1488.331328] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1488.331493] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1488.331817] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1488.332086] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6cc3030d-7c7d-4158-a13f-dfcb8e4b5660 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.337962] env[62510]: DEBUG oslo_vmware.api [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Waiting for the task: (returnval){ [ 1488.337962] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]529fdffd-2ff7-2d99-b321-c7f54899c984" [ 1488.337962] env[62510]: _type = "Task" [ 1488.337962] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.341928] env[62510]: DEBUG oslo_concurrency.lockutils [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.633s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1488.342465] env[62510]: DEBUG nova.compute.manager [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Start building networks asynchronously for instance. 
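Annotation: several records here trace oslo.concurrency lock bookkeeping ("Acquiring lock X by Y", "acquired ... waited N s", "released ... held N s"), plus an external semaphore guarding the datastore image-cache path. A minimal sketch of that usage with the documented lockutils.lock() / lockutils.synchronized() APIs; the lock name is copied from the log, the body functions are placeholders.

```python
from oslo_concurrency import lockutils

def claim_resources_for_instance():
    # Hypothetical placeholder for the work done while the lock is held.
    pass

# Context-manager form; "waited" in the log is time spent contending for
# the lock, "held" is time spent inside the block.
with lockutils.lock("compute_resources"):
    claim_resources_for_instance()

# Decorator form, as used around resource-tracker methods such as
# instance_claim / update_usage in the records above.
@lockutils.synchronized("compute_resources")
def instance_claim():
    claim_resources_for_instance()

instance_claim()

# The "[datastore1] devstack-image-cache_base/..." records additionally take
# an external (inter-process) semaphore, i.e. lockutils.lock(name,
# external=True); that form needs a configured lock_path, so it is only
# mentioned here rather than executed.
```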
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1488.346610] env[62510]: DEBUG oslo_concurrency.lockutils [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 24.902s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1488.346858] env[62510]: DEBUG nova.objects.instance [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62510) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1488.355064] env[62510]: DEBUG oslo_vmware.api [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]529fdffd-2ff7-2d99-b321-c7f54899c984, 'name': SearchDatastore_Task, 'duration_secs': 0.01089} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.355357] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1488.355620] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1488.355857] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1488.356155] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1488.356239] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1488.356440] env[62510]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9716b743-8ead-4e89-b2e4-676dc7bdca38 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.364523] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1488.364774] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1488.365464] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dcbf2ec6-00d7-4003-84a3-aabaa8db1dde {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.371329] env[62510]: DEBUG oslo_vmware.api [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Waiting for the task: (returnval){ [ 1488.371329] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5207e2e2-a2d9-c611-a233-5c7bfb7f8a1f" [ 1488.371329] env[62510]: _type = "Task" [ 1488.371329] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.381953] env[62510]: DEBUG oslo_vmware.api [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5207e2e2-a2d9-c611-a233-5c7bfb7f8a1f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.408454] env[62510]: DEBUG nova.network.neutron [req-f792bb5f-4150-44fc-8de0-a0d19d22526b req-4c38a059-d5f6-4182-bfe6-806c2bf92891 service nova] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Updated VIF entry in instance network info cache for port 152cd402-ef01-4f90-a464-27b8e3ac0650. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1488.408815] env[62510]: DEBUG nova.network.neutron [req-f792bb5f-4150-44fc-8de0-a0d19d22526b req-4c38a059-d5f6-4182-bfe6-806c2bf92891 service nova] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Updating instance_info_cache with network_info: [{"id": "152cd402-ef01-4f90-a464-27b8e3ac0650", "address": "fa:16:3e:44:3e:e1", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.220", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap152cd402-ef", "ovs_interfaceid": "152cd402-ef01-4f90-a464-27b8e3ac0650", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1488.523050] env[62510]: INFO nova.compute.manager [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Took 37.16 seconds to build instance. [ 1488.582448] env[62510]: DEBUG nova.compute.manager [req-a1c6b868-fd3a-4f9f-9720-ff55f47e6688 req-245d323f-794b-44de-a253-05d32e13595a service nova] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Received event network-changed-a003d1ad-b7fa-4edc-a654-9a89e9533cbd {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1488.582448] env[62510]: DEBUG nova.compute.manager [req-a1c6b868-fd3a-4f9f-9720-ff55f47e6688 req-245d323f-794b-44de-a253-05d32e13595a service nova] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Refreshing instance network info cache due to event network-changed-a003d1ad-b7fa-4edc-a654-9a89e9533cbd. 
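Annotation: the instance_info_cache update above carries the full Neutron VIF model for port 152cd402-ef01-4f90-a464-27b8e3ac0650 (fixed IP, MAC, MTU, NSX binding details, tap device name). A small sketch pulling the commonly inspected fields out of one such entry; the structure is copied from the logged entry, the helper itself is illustrative.

```python
# One element of the network_info list logged above, trimmed to the fields
# used below.
vif = {
    "id": "152cd402-ef01-4f90-a464-27b8e3ac0650",
    "address": "fa:16:3e:44:3e:e1",
    "devname": "tap152cd402-ef",
    "details": {"nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d"},
    "network": {
        "label": "shared",
        "meta": {"mtu": 8950},
        "subnets": [{
            "cidr": "192.168.233.0/24",
            "ips": [{"address": "192.168.233.220", "type": "fixed"}],
        }],
    },
}

def summarize_vif(vif):
    """Illustrative helper: flatten the fields operators usually grep for."""
    fixed = [ip["address"]
             for subnet in vif["network"]["subnets"]
             for ip in subnet["ips"] if ip["type"] == "fixed"]
    return {
        "port_id": vif["id"],
        "mac": vif["address"],
        "fixed_ips": fixed,
        "device": vif["devname"],
        "mtu": vif["network"]["meta"]["mtu"],
        "nsx_switch": vif["details"]["nsx-logical-switch-id"],
    }

print(summarize_vif(vif))
```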
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1488.582448] env[62510]: DEBUG oslo_concurrency.lockutils [req-a1c6b868-fd3a-4f9f-9720-ff55f47e6688 req-245d323f-794b-44de-a253-05d32e13595a service nova] Acquiring lock "refresh_cache-0029d975-bd48-4558-9f41-a0cf91336393" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1488.582448] env[62510]: DEBUG oslo_concurrency.lockutils [req-a1c6b868-fd3a-4f9f-9720-ff55f47e6688 req-245d323f-794b-44de-a253-05d32e13595a service nova] Acquired lock "refresh_cache-0029d975-bd48-4558-9f41-a0cf91336393" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1488.582448] env[62510]: DEBUG nova.network.neutron [req-a1c6b868-fd3a-4f9f-9720-ff55f47e6688 req-245d323f-794b-44de-a253-05d32e13595a service nova] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Refreshing network info cache for port a003d1ad-b7fa-4edc-a654-9a89e9533cbd {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1488.628501] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Acquiring lock "refresh_cache-aca56820-5a06-43dd-9d98-25421f7ef6a6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1488.628501] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Acquired lock "refresh_cache-aca56820-5a06-43dd-9d98-25421f7ef6a6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1488.628501] env[62510]: DEBUG nova.network.neutron [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1488.849074] env[62510]: DEBUG nova.compute.utils [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1488.849963] env[62510]: DEBUG nova.compute.manager [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1488.850198] env[62510]: DEBUG nova.network.neutron [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1488.882896] env[62510]: DEBUG oslo_vmware.api [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5207e2e2-a2d9-c611-a233-5c7bfb7f8a1f, 'name': SearchDatastore_Task, 'duration_secs': 0.015416} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.883720] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-372fc40e-0e1d-4bec-be05-8363d94596a6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.890125] env[62510]: DEBUG oslo_vmware.api [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Waiting for the task: (returnval){ [ 1488.890125] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52e510fe-7ff8-f7c4-e097-7d7a2c61831f" [ 1488.890125] env[62510]: _type = "Task" [ 1488.890125] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.902476] env[62510]: DEBUG oslo_vmware.api [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52e510fe-7ff8-f7c4-e097-7d7a2c61831f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.911641] env[62510]: DEBUG oslo_concurrency.lockutils [req-f792bb5f-4150-44fc-8de0-a0d19d22526b req-4c38a059-d5f6-4182-bfe6-806c2bf92891 service nova] Releasing lock "refresh_cache-a040671e-941d-4406-81af-f2f7a4b690e4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1488.911910] env[62510]: DEBUG nova.compute.manager [req-f792bb5f-4150-44fc-8de0-a0d19d22526b req-4c38a059-d5f6-4182-bfe6-806c2bf92891 service nova] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Received event network-vif-deleted-23a05e54-a02b-4cd8-8812-f13c57329785 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1489.009422] env[62510]: DEBUG nova.policy [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '87bcaadb9e9c4f0b87e85f1e1ce537db', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '176b26f90d5441208c6157567aaf19fe', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1489.025940] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2c541c56-284a-437d-a218-84c1eb9df47e tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Lock "4f9bfb02-8aea-45a9-85ea-97e70f0d41fb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.986s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1489.212783] env[62510]: DEBUG nova.network.neutron [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1489.347790] env[62510]: DEBUG nova.network.neutron [req-a1c6b868-fd3a-4f9f-9720-ff55f47e6688 req-245d323f-794b-44de-a253-05d32e13595a service nova] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Updated VIF entry in instance network info cache for port a003d1ad-b7fa-4edc-a654-9a89e9533cbd. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1489.348421] env[62510]: DEBUG nova.network.neutron [req-a1c6b868-fd3a-4f9f-9720-ff55f47e6688 req-245d323f-794b-44de-a253-05d32e13595a service nova] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Updating instance_info_cache with network_info: [{"id": "a003d1ad-b7fa-4edc-a654-9a89e9533cbd", "address": "fa:16:3e:b2:5f:4c", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.139", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa003d1ad-b7", "ovs_interfaceid": "a003d1ad-b7fa-4edc-a654-9a89e9533cbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1489.355510] env[62510]: DEBUG nova.compute.manager [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Start building block device mappings for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1489.362272] env[62510]: DEBUG oslo_concurrency.lockutils [None req-884778d4-185f-4681-bb9d-e1a0f410b79b tempest-ServersAdmin275Test-994320164 tempest-ServersAdmin275Test-994320164-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1489.362272] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dce2361b-1a7e-4bbf-a443-577e2767546e tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.345s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1489.362272] env[62510]: DEBUG nova.objects.instance [None req-dce2361b-1a7e-4bbf-a443-577e2767546e tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Lazy-loading 'resources' on Instance uuid c58184e7-bf4f-406b-a778-9b8f60740fe6 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1489.402891] env[62510]: DEBUG oslo_vmware.api [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52e510fe-7ff8-f7c4-e097-7d7a2c61831f, 'name': SearchDatastore_Task, 'duration_secs': 0.010698} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1489.403729] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1489.404323] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] a040671e-941d-4406-81af-f2f7a4b690e4/a040671e-941d-4406-81af-f2f7a4b690e4.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1489.404767] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d25d641f-b115-43d2-bf58-d52d709c4426 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.412065] env[62510]: DEBUG oslo_vmware.api [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Waiting for the task: (returnval){ [ 1489.412065] env[62510]: value = "task-1768450" [ 1489.412065] env[62510]: _type = "Task" [ 1489.412065] env[62510]: } to complete. 
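Annotation: the CopyVirtualDisk_Task above copies the cached image vmdk into the instance's own directory; both ends follow the "[datastore] folder/name.vmdk" convention visible throughout these records. A sketch of that path construction, assuming only the naming pattern shown here.

```python
# Path layout as it appears in the log: the image cache keeps one folder per
# image id, each instance gets a folder named after its uuid.
DATASTORE = "datastore1"
IMAGE_ID = "645af513-c243-4722-b631-714f21477ae6"
INSTANCE_UUID = "a040671e-941d-4406-81af-f2f7a4b690e4"

def ds_path(datastore, *parts):
    """Format a datastore path the way it shows up in these records."""
    return f"[{datastore}] " + "/".join(parts)

source = ds_path(DATASTORE, "devstack-image-cache_base", IMAGE_ID, f"{IMAGE_ID}.vmdk")
dest = ds_path(DATASTORE, INSTANCE_UUID, f"{INSTANCE_UUID}.vmdk")

print(source)  # [datastore1] devstack-image-cache_base/645af513-.../645af513-....vmdk
print(dest)    # [datastore1] a040671e-.../a040671e-....vmdk
```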
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1489.421624] env[62510]: DEBUG oslo_vmware.api [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Task: {'id': task-1768450, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.508904] env[62510]: DEBUG nova.network.neutron [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Updating instance_info_cache with network_info: [{"id": "66e8c35a-f1f1-4dfc-94de-fd2781f02eae", "address": "fa:16:3e:42:93:41", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66e8c35a-f1", "ovs_interfaceid": "66e8c35a-f1f1-4dfc-94de-fd2781f02eae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1489.529306] env[62510]: DEBUG nova.compute.manager [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1489.560895] env[62510]: DEBUG nova.network.neutron [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Successfully created port: 16130581-d9c2-41e9-8c94-f66f9c4f357c {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1489.778672] env[62510]: DEBUG nova.compute.manager [req-f4a365b4-5b39-44bc-8a9e-950670c06a0a req-226ea11e-db30-44a6-80b4-40fe61e7804d service nova] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Received event network-vif-plugged-66e8c35a-f1f1-4dfc-94de-fd2781f02eae {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1489.779084] env[62510]: DEBUG oslo_concurrency.lockutils [req-f4a365b4-5b39-44bc-8a9e-950670c06a0a req-226ea11e-db30-44a6-80b4-40fe61e7804d service nova] Acquiring lock "aca56820-5a06-43dd-9d98-25421f7ef6a6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1489.779438] env[62510]: DEBUG oslo_concurrency.lockutils [req-f4a365b4-5b39-44bc-8a9e-950670c06a0a req-226ea11e-db30-44a6-80b4-40fe61e7804d service nova] Lock "aca56820-5a06-43dd-9d98-25421f7ef6a6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1489.779851] env[62510]: DEBUG oslo_concurrency.lockutils [req-f4a365b4-5b39-44bc-8a9e-950670c06a0a req-226ea11e-db30-44a6-80b4-40fe61e7804d service nova] Lock "aca56820-5a06-43dd-9d98-25421f7ef6a6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1489.780072] env[62510]: DEBUG nova.compute.manager [req-f4a365b4-5b39-44bc-8a9e-950670c06a0a req-226ea11e-db30-44a6-80b4-40fe61e7804d service nova] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] No waiting events found dispatching network-vif-plugged-66e8c35a-f1f1-4dfc-94de-fd2781f02eae {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1489.780703] env[62510]: WARNING nova.compute.manager [req-f4a365b4-5b39-44bc-8a9e-950670c06a0a req-226ea11e-db30-44a6-80b4-40fe61e7804d service nova] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Received unexpected event network-vif-plugged-66e8c35a-f1f1-4dfc-94de-fd2781f02eae for instance with vm_state building and task_state spawning. [ 1489.780703] env[62510]: DEBUG nova.compute.manager [req-f4a365b4-5b39-44bc-8a9e-950670c06a0a req-226ea11e-db30-44a6-80b4-40fe61e7804d service nova] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Received event network-changed-66e8c35a-f1f1-4dfc-94de-fd2781f02eae {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1489.780919] env[62510]: DEBUG nova.compute.manager [req-f4a365b4-5b39-44bc-8a9e-950670c06a0a req-226ea11e-db30-44a6-80b4-40fe61e7804d service nova] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Refreshing instance network info cache due to event network-changed-66e8c35a-f1f1-4dfc-94de-fd2781f02eae. 
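Annotation: the WARNING above ("Received unexpected event network-vif-plugged-... for instance with vm_state building") is the benign race where Neutron reports the VIF as plugged before the compute manager has registered a waiter for that event. A much-simplified sketch of the waiter registry, keyed by (instance, event) and backed by threading.Event; the real code is nova.compute.manager's InstanceEvents, this is only an illustration.

```python
import threading

class InstanceEventsSketch:
    """Toy version of the waiter registry the records above exercise."""

    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}   # (instance_uuid, event_key) -> threading.Event

    def prepare(self, instance_uuid, event_key):
        # Registered before the operation that will trigger the event.
        with self._lock:
            ev = threading.Event()
            self._waiters[(instance_uuid, event_key)] = ev
            return ev

    def pop(self, instance_uuid, event_key):
        # Called by the external-event handler when Neutron reports the event.
        with self._lock:
            ev = self._waiters.pop((instance_uuid, event_key), None)
        if ev is None:
            # Matches the "No waiting events found dispatching ..." /
            # "Received unexpected event ..." pair in the log.
            print(f"unexpected event {event_key} for {instance_uuid}")
        else:
            ev.set()

events = InstanceEventsSketch()
events.pop("aca56820-5a06-43dd-9d98-25421f7ef6a6",
           "network-vif-plugged-66e8c35a-f1f1-4dfc-94de-fd2781f02eae")
```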
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1489.781161] env[62510]: DEBUG oslo_concurrency.lockutils [req-f4a365b4-5b39-44bc-8a9e-950670c06a0a req-226ea11e-db30-44a6-80b4-40fe61e7804d service nova] Acquiring lock "refresh_cache-aca56820-5a06-43dd-9d98-25421f7ef6a6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1489.859045] env[62510]: DEBUG oslo_concurrency.lockutils [req-a1c6b868-fd3a-4f9f-9720-ff55f47e6688 req-245d323f-794b-44de-a253-05d32e13595a service nova] Releasing lock "refresh_cache-0029d975-bd48-4558-9f41-a0cf91336393" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1489.923538] env[62510]: DEBUG oslo_vmware.api [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Task: {'id': task-1768450, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.014023] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Releasing lock "refresh_cache-aca56820-5a06-43dd-9d98-25421f7ef6a6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1490.014023] env[62510]: DEBUG nova.compute.manager [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Instance network_info: |[{"id": "66e8c35a-f1f1-4dfc-94de-fd2781f02eae", "address": "fa:16:3e:42:93:41", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66e8c35a-f1", "ovs_interfaceid": "66e8c35a-f1f1-4dfc-94de-fd2781f02eae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1490.014023] env[62510]: DEBUG oslo_concurrency.lockutils [req-f4a365b4-5b39-44bc-8a9e-950670c06a0a req-226ea11e-db30-44a6-80b4-40fe61e7804d service nova] Acquired lock "refresh_cache-aca56820-5a06-43dd-9d98-25421f7ef6a6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1490.014023] env[62510]: DEBUG nova.network.neutron [req-f4a365b4-5b39-44bc-8a9e-950670c06a0a req-226ea11e-db30-44a6-80b4-40fe61e7804d service nova] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Refreshing network info 
cache for port 66e8c35a-f1f1-4dfc-94de-fd2781f02eae {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1490.014702] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:42:93:41', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '28d04eee-6dbb-491a-a999-b659c799679d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '66e8c35a-f1f1-4dfc-94de-fd2781f02eae', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1490.024231] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Creating folder: Project (e19e216f37fe490abb9e06fecee78320). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1490.027401] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0660fae9-d72f-4496-818b-9b6673941c66 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.039332] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Created folder: Project (e19e216f37fe490abb9e06fecee78320) in parent group-v367197. [ 1490.039547] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Creating folder: Instances. Parent ref: group-v367277. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1490.044051] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8342813b-0072-4753-aa16-66356148b2ea {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.057424] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Created folder: Instances in parent group-v367277. [ 1490.057424] env[62510]: DEBUG oslo.service.loopingcall [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
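Annotation: directly above, the Neutron network_info entry is translated into the VIF info the VMware driver builds the VM with: network_name 'br-int', the port MAC, an OpaqueNetwork reference carrying the NSX logical-switch id, the Neutron port id as iface_id, and vif_model 'vmxnet3'. A sketch of that mapping with field names taken from the logged dicts; the function itself is illustrative, not Nova's code.

```python
def to_vmware_vif_info(vif):
    """Map a Neutron VIF entry (as logged) to the driver-side VIF info dict."""
    return {
        "network_name": vif["network"]["bridge"],          # 'br-int'
        "mac_address": vif["address"],
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": vif["details"]["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],
        "vif_model": "vmxnet3",                            # as in the logged VIF info
    }

vif = {
    "id": "66e8c35a-f1f1-4dfc-94de-fd2781f02eae",
    "address": "fa:16:3e:42:93:41",
    "network": {"bridge": "br-int"},
    "details": {"nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d"},
}
print(to_vmware_vif_info(vif))
```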
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1490.059141] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1490.060246] env[62510]: DEBUG oslo_concurrency.lockutils [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1490.060508] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-53ada6e2-7083-433e-a630-d852022e2ed7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.084280] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1490.084280] env[62510]: value = "task-1768453" [ 1490.084280] env[62510]: _type = "Task" [ 1490.084280] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.097481] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768453, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.370101] env[62510]: DEBUG nova.compute.manager [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1490.397199] env[62510]: DEBUG nova.virt.hardware [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1490.397555] env[62510]: DEBUG nova.virt.hardware [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1490.397794] env[62510]: DEBUG nova.virt.hardware [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1490.398087] env[62510]: DEBUG nova.virt.hardware [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1490.398321] env[62510]: DEBUG nova.virt.hardware [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1490.398540] env[62510]: DEBUG nova.virt.hardware [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1490.398815] env[62510]: DEBUG nova.virt.hardware [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1490.399072] env[62510]: DEBUG nova.virt.hardware [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1490.399329] env[62510]: DEBUG nova.virt.hardware [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1490.399577] env[62510]: DEBUG nova.virt.hardware [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1490.399843] env[62510]: DEBUG nova.virt.hardware [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1490.401639] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad8528ec-86cb-4988-96be-ba3953a057fa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.409566] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d4873cf-3c37-47c5-a598-cf4c1215b2b1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.434963] env[62510]: DEBUG oslo_vmware.api [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Task: {'id': task-1768450, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.555989} completed successfully. 
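Annotation: the hardware.py records above walk through CPU-topology selection: with no flavor or image limits ("0:0:0" meaning no constraint, maximum 65536 each), a 1-vCPU guest yields exactly one candidate, VirtCPUTopology(cores=1, sockets=1, threads=1). The sketch below is a simplified enumeration of candidate topologies for a given vCPU count, not Nova's exact algorithm.

```python
from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """List (sockets, cores, threads) triples whose product equals vcpus.

    Simplified: Nova additionally applies flavor/image preferences and
    sorting; the 65536 defaults mirror the "maximum" line in the log.
    """
    divisors = [d for d in range(1, vcpus + 1) if vcpus % d == 0]
    topologies = []
    for sockets, cores in product(divisors, divisors):
        if vcpus % (sockets * cores):
            continue
        threads = vcpus // (sockets * cores)
        if sockets <= max_sockets and cores <= max_cores and threads <= max_threads:
            topologies.append((sockets, cores, threads))
    return topologies

print(possible_topologies(1))   # [(1, 1, 1)] -- matches "Got 1 possible topologies"
print(possible_topologies(4))   # e.g. (1, 4, 1), (2, 2, 1), (4, 1, 1), ...
```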
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.435586] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] a040671e-941d-4406-81af-f2f7a4b690e4/a040671e-941d-4406-81af-f2f7a4b690e4.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1490.435707] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1490.435962] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-142ae94b-d296-4512-abe7-5f44e46ff59f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.442490] env[62510]: DEBUG oslo_vmware.api [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Waiting for the task: (returnval){ [ 1490.442490] env[62510]: value = "task-1768454" [ 1490.442490] env[62510]: _type = "Task" [ 1490.442490] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.450733] env[62510]: DEBUG oslo_vmware.api [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Task: {'id': task-1768454, 'name': ExtendVirtualDisk_Task} progress is 0%. 
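Annotation: the "Extending root virtual disk to 1048576" record above requests the root disk size; reading the value as KiB (an inference from the numbers, not stated in the log), 1048576 KiB is exactly 1 GiB, i.e. a root_gb=1 flavor like the m1.nano dump earlier in this section.

```python
# Inference only: the 1048576 in the ExtendVirtualDisk record reads as KiB.
root_gb = 1
size_kib = root_gb * 1024 * 1024
assert size_kib == 1048576
print(f"root_gb={root_gb} -> {size_kib} KiB")
```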
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.452587] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aac39b52-aaff-492f-a1d3-2b0dc9f5b4f4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.460727] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd04b0a1-c144-481d-9e6d-3992916a827b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.491280] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f002a58-9c25-4b67-b34f-a81045e8bf58 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.500236] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-633a8599-b1c4-4c8b-83d0-6ca8ac2a731c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.519150] env[62510]: DEBUG nova.compute.provider_tree [None req-dce2361b-1a7e-4bbf-a443-577e2767546e tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1490.595705] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768453, 'name': CreateVM_Task, 'duration_secs': 0.390305} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.596097] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1490.596607] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1490.597053] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1490.597116] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1490.597357] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4f2a0fa-7669-414e-b1d1-cc4f02495da6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1490.602015] env[62510]: DEBUG oslo_vmware.api [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Waiting for the task: (returnval){ [ 1490.602015] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]521961b6-8a2e-d176-68a6-2ac81bd13f70" [ 1490.602015] env[62510]: _type = "Task" [ 1490.602015] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.610487] env[62510]: DEBUG oslo_vmware.api [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]521961b6-8a2e-d176-68a6-2ac81bd13f70, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.627484] env[62510]: DEBUG nova.compute.manager [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1490.628359] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39977567-5806-4132-84ae-b163aa77446c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.824970] env[62510]: DEBUG nova.network.neutron [req-f4a365b4-5b39-44bc-8a9e-950670c06a0a req-226ea11e-db30-44a6-80b4-40fe61e7804d service nova] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Updated VIF entry in instance network info cache for port 66e8c35a-f1f1-4dfc-94de-fd2781f02eae. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1490.825348] env[62510]: DEBUG nova.network.neutron [req-f4a365b4-5b39-44bc-8a9e-950670c06a0a req-226ea11e-db30-44a6-80b4-40fe61e7804d service nova] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Updating instance_info_cache with network_info: [{"id": "66e8c35a-f1f1-4dfc-94de-fd2781f02eae", "address": "fa:16:3e:42:93:41", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66e8c35a-f1", "ovs_interfaceid": "66e8c35a-f1f1-4dfc-94de-fd2781f02eae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1490.952092] env[62510]: DEBUG oslo_vmware.api [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Task: {'id': task-1768454, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071472} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.952347] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1490.953141] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02009bc6-147a-4e9e-bc85-d04f94374228 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.976821] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Reconfiguring VM instance instance-0000001c to attach disk [datastore1] a040671e-941d-4406-81af-f2f7a4b690e4/a040671e-941d-4406-81af-f2f7a4b690e4.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1490.977145] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b67b13f7-8e94-4e63-abe2-e0ee5db8ba17 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.996611] env[62510]: DEBUG oslo_vmware.api [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Waiting for the task: (returnval){ [ 1490.996611] env[62510]: value = "task-1768456" [ 1490.996611] env[62510]: _type = "Task" [ 1490.996611] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.004894] env[62510]: DEBUG oslo_vmware.api [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Task: {'id': task-1768456, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.023062] env[62510]: DEBUG nova.scheduler.client.report [None req-dce2361b-1a7e-4bbf-a443-577e2767546e tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1491.112429] env[62510]: DEBUG oslo_vmware.api [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]521961b6-8a2e-d176-68a6-2ac81bd13f70, 'name': SearchDatastore_Task, 'duration_secs': 0.04319} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1491.112737] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1491.112973] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1491.113216] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1491.113361] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1491.113560] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1491.113837] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1cb6f20c-dafa-451f-a993-81c251af526f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.122861] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1491.123070] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Folder [datastore1] devstack-image-cache_base created. 
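The lock, mkdir, and datastore-search sequence above is the usual "serialize on the cached base image, create the cache folder idempotently, then reuse or copy the base disk" flow. A hedged sketch of that shape; `make_dir`, `datastore_exists`, and `copy_vmdk` are illustrative stand-ins, not Nova's real helpers:

```python
# Sketch (assumed helpers) of the image-cache handling suggested above:
# serialize per cached image path, create the cache directory idempotently,
# and copy the base VMDK into the instance directory only when needed.
import threading
from contextlib import contextmanager

_image_locks = {}
_registry_lock = threading.Lock()

@contextmanager
def image_lock(cache_path):
    with _registry_lock:
        lock = _image_locks.setdefault(cache_path, threading.Lock())
    with lock:
        yield

def prepare_instance_disk(cache_path, instance_path,
                          make_dir, datastore_exists, copy_vmdk):
    with image_lock(cache_path):
        make_dir("[datastore1] devstack-image-cache_base")  # idempotent, as logged
        if not datastore_exists(instance_path):              # SearchDatastore_Task analogue
            copy_vmdk(cache_path, instance_path)              # CopyVirtualDisk_Task analogue
```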
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1491.123770] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8391918c-065a-4f5d-aa7e-9f8a521a3132 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.129105] env[62510]: DEBUG oslo_vmware.api [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Waiting for the task: (returnval){ [ 1491.129105] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52ab09ca-fa64-af46-cfaa-ac8fa1639f7e" [ 1491.129105] env[62510]: _type = "Task" [ 1491.129105] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.137358] env[62510]: DEBUG oslo_vmware.api [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52ab09ca-fa64-af46-cfaa-ac8fa1639f7e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.139352] env[62510]: INFO nova.compute.manager [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] instance snapshotting [ 1491.141688] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ba6d40d-ea2a-4c13-a2eb-dc39e272f5e0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.162183] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94c27106-61a1-42fd-9740-977888d5db15 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.328216] env[62510]: DEBUG oslo_concurrency.lockutils [req-f4a365b4-5b39-44bc-8a9e-950670c06a0a req-226ea11e-db30-44a6-80b4-40fe61e7804d service nova] Releasing lock "refresh_cache-aca56820-5a06-43dd-9d98-25421f7ef6a6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1491.352139] env[62510]: DEBUG nova.compute.manager [req-9074ee5d-ea33-460e-aba8-f42b292e5795 req-21adcd30-04c0-415d-ac09-2a46fd79147f service nova] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Received event network-vif-plugged-16130581-d9c2-41e9-8c94-f66f9c4f357c {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1491.352459] env[62510]: DEBUG oslo_concurrency.lockutils [req-9074ee5d-ea33-460e-aba8-f42b292e5795 req-21adcd30-04c0-415d-ac09-2a46fd79147f service nova] Acquiring lock "8e3cefa1-fab9-469e-8a32-31b4a8ecf4be-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1491.352792] env[62510]: DEBUG oslo_concurrency.lockutils [req-9074ee5d-ea33-460e-aba8-f42b292e5795 req-21adcd30-04c0-415d-ac09-2a46fd79147f service nova] Lock "8e3cefa1-fab9-469e-8a32-31b4a8ecf4be-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s 
{{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1491.352978] env[62510]: DEBUG oslo_concurrency.lockutils [req-9074ee5d-ea33-460e-aba8-f42b292e5795 req-21adcd30-04c0-415d-ac09-2a46fd79147f service nova] Lock "8e3cefa1-fab9-469e-8a32-31b4a8ecf4be-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1491.353176] env[62510]: DEBUG nova.compute.manager [req-9074ee5d-ea33-460e-aba8-f42b292e5795 req-21adcd30-04c0-415d-ac09-2a46fd79147f service nova] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] No waiting events found dispatching network-vif-plugged-16130581-d9c2-41e9-8c94-f66f9c4f357c {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1491.353347] env[62510]: WARNING nova.compute.manager [req-9074ee5d-ea33-460e-aba8-f42b292e5795 req-21adcd30-04c0-415d-ac09-2a46fd79147f service nova] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Received unexpected event network-vif-plugged-16130581-d9c2-41e9-8c94-f66f9c4f357c for instance with vm_state building and task_state spawning. [ 1491.451905] env[62510]: DEBUG nova.network.neutron [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Successfully updated port: 16130581-d9c2-41e9-8c94-f66f9c4f357c {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1491.507034] env[62510]: DEBUG oslo_vmware.api [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Task: {'id': task-1768456, 'name': ReconfigVM_Task} progress is 14%. 
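The "No waiting events found dispatching network-vif-plugged ... unexpected event" warning above reflects the external-event bookkeeping: a spawning thread registers the event it expects, the handler pops and signals it, and an event nobody registered for is only warned about. A small sketch with assumed names, not Nova's real InstanceEvents code:

```python
# Assumed-name sketch of the "waiting events" bookkeeping implied above.
import threading
from collections import defaultdict

_waiters = defaultdict(dict)   # instance_uuid -> {event_name: threading.Event}
_waiters_lock = threading.Lock()

def prepare_for_event(instance_uuid, event_name):
    event = threading.Event()
    with _waiters_lock:
        _waiters[instance_uuid][event_name] = event
    return event               # the spawn later calls event.wait(timeout=...)

def dispatch_event(instance_uuid, event_name):
    with _waiters_lock:
        event = _waiters[instance_uuid].pop(event_name, None)
    if event is None:
        print(f"WARNING: unexpected event {event_name} for instance {instance_uuid}")
        return
    event.set()
```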
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.529439] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dce2361b-1a7e-4bbf-a443-577e2767546e tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.167s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1491.530704] env[62510]: DEBUG oslo_concurrency.lockutils [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.560s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1491.532192] env[62510]: INFO nova.compute.claims [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1491.549580] env[62510]: INFO nova.scheduler.client.report [None req-dce2361b-1a7e-4bbf-a443-577e2767546e tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Deleted allocations for instance c58184e7-bf4f-406b-a778-9b8f60740fe6 [ 1491.639718] env[62510]: DEBUG oslo_vmware.api [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52ab09ca-fa64-af46-cfaa-ac8fa1639f7e, 'name': SearchDatastore_Task, 'duration_secs': 0.026321} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1491.640770] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-33888d05-a996-4635-9852-af617d14abec {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.646077] env[62510]: DEBUG oslo_vmware.api [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Waiting for the task: (returnval){ [ 1491.646077] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52ba63e9-f412-a585-c1bc-b581ff6437df" [ 1491.646077] env[62510]: _type = "Task" [ 1491.646077] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.652979] env[62510]: DEBUG oslo_vmware.api [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52ba63e9-f412-a585-c1bc-b581ff6437df, 'name': SearchDatastore_Task} progress is 0%. 
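The "acquired ... waited 24.560s" and "released ... held 2.167s" lines above are lock wait/hold timings. An illustrative context manager that produces the same kind of measurement; it is not oslo.concurrency's implementation, just the same idea:

```python
# Illustrative wait/held timing around a lock, mirroring the lockutils
# "acquired :: waited Xs" / "released :: held Ys" lines above.
import threading
import time
from contextlib import contextmanager

@contextmanager
def timed_lock(lock, name):
    start = time.monotonic()
    with lock:
        waited = time.monotonic() - start
        print(f'Lock "{name}" acquired :: waited {waited:.3f}s')
        held_from = time.monotonic()
        try:
            yield
        finally:
            held = time.monotonic() - held_from
            print(f'Lock "{name}" released :: held {held:.3f}s')

# usage sketch:
# resources_lock = threading.Lock()
# with timed_lock(resources_lock, "compute_resources"):
#     pass  # claim or update resources here
```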
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.673025] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Creating Snapshot of the VM instance {{(pid=62510) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1491.673025] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-684898e8-4848-45aa-8041-b94fcb65b3c2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.680387] env[62510]: DEBUG oslo_vmware.api [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Waiting for the task: (returnval){ [ 1491.680387] env[62510]: value = "task-1768457" [ 1491.680387] env[62510]: _type = "Task" [ 1491.680387] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.688625] env[62510]: DEBUG oslo_vmware.api [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768457, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.954320] env[62510]: DEBUG oslo_concurrency.lockutils [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Acquiring lock "refresh_cache-8e3cefa1-fab9-469e-8a32-31b4a8ecf4be" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1491.954635] env[62510]: DEBUG oslo_concurrency.lockutils [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Acquired lock "refresh_cache-8e3cefa1-fab9-469e-8a32-31b4a8ecf4be" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1491.954738] env[62510]: DEBUG nova.network.neutron [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1492.006845] env[62510]: DEBUG oslo_vmware.api [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Task: {'id': task-1768456, 'name': ReconfigVM_Task, 'duration_secs': 0.966993} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.007410] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Reconfigured VM instance instance-0000001c to attach disk [datastore1] a040671e-941d-4406-81af-f2f7a4b690e4/a040671e-941d-4406-81af-f2f7a4b690e4.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1492.008026] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8d0d9143-afe5-4a3a-a7af-35debf85298b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.013886] env[62510]: DEBUG oslo_vmware.api [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Waiting for the task: (returnval){ [ 1492.013886] env[62510]: value = "task-1768458" [ 1492.013886] env[62510]: _type = "Task" [ 1492.013886] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.022146] env[62510]: DEBUG oslo_vmware.api [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Task: {'id': task-1768458, 'name': Rename_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.055905] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dce2361b-1a7e-4bbf-a443-577e2767546e tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Lock "c58184e7-bf4f-406b-a778-9b8f60740fe6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.453s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1492.156958] env[62510]: DEBUG oslo_vmware.api [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52ba63e9-f412-a585-c1bc-b581ff6437df, 'name': SearchDatastore_Task, 'duration_secs': 0.021737} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.157264] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1492.157529] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] aca56820-5a06-43dd-9d98-25421f7ef6a6/aca56820-5a06-43dd-9d98-25421f7ef6a6.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1492.157798] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-253d7a9e-52dc-4233-98ff-72c5f4270ca5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.163970] env[62510]: DEBUG oslo_vmware.api [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Waiting for the task: (returnval){ [ 1492.163970] env[62510]: value = "task-1768459" [ 1492.163970] env[62510]: _type = "Task" [ 1492.163970] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.171751] env[62510]: DEBUG oslo_vmware.api [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Task: {'id': task-1768459, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.188998] env[62510]: DEBUG oslo_vmware.api [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768457, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.498827] env[62510]: DEBUG nova.network.neutron [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1492.529451] env[62510]: DEBUG oslo_vmware.api [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Task: {'id': task-1768458, 'name': Rename_Task, 'duration_secs': 0.140882} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.530428] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1492.530428] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-247546ec-c499-494d-92b4-d887bb2780d1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.537605] env[62510]: DEBUG oslo_vmware.api [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Waiting for the task: (returnval){ [ 1492.537605] env[62510]: value = "task-1768460" [ 1492.537605] env[62510]: _type = "Task" [ 1492.537605] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.549664] env[62510]: DEBUG oslo_vmware.api [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Task: {'id': task-1768460, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.675329] env[62510]: DEBUG oslo_vmware.api [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Task: {'id': task-1768459, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.690582] env[62510]: DEBUG oslo_vmware.api [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768457, 'name': CreateSnapshot_Task, 'duration_secs': 0.662337} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.690911] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Created Snapshot of the VM instance {{(pid=62510) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1492.691761] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc2119ae-4fb5-438a-91b6-7cabdf8648aa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.786090] env[62510]: DEBUG nova.network.neutron [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Updating instance_info_cache with network_info: [{"id": "16130581-d9c2-41e9-8c94-f66f9c4f357c", "address": "fa:16:3e:04:5c:3b", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.62", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16130581-d9", "ovs_interfaceid": "16130581-d9c2-41e9-8c94-f66f9c4f357c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1493.035863] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2f84824-bd60-47bb-a469-f2413b1aa03b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.048903] env[62510]: DEBUG oslo_vmware.api [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Task: {'id': task-1768460, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.049916] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d42faef-115c-4032-8638-292f5fadc9a5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.081591] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92e9f0be-3a16-4292-b8c0-16141c1fe5b7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.089274] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fccfcbe3-991b-435c-9f17-0f121c09593c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.104603] env[62510]: DEBUG nova.compute.provider_tree [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1493.176295] env[62510]: DEBUG oslo_vmware.api [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Task: {'id': task-1768459, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.513699} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1493.176592] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] aca56820-5a06-43dd-9d98-25421f7ef6a6/aca56820-5a06-43dd-9d98-25421f7ef6a6.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1493.177104] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1493.177104] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-294040b6-54a3-4e27-b547-e51720afe909 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.184161] env[62510]: DEBUG oslo_vmware.api [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Waiting for the task: (returnval){ [ 1493.184161] env[62510]: value = "task-1768462" [ 1493.184161] env[62510]: _type = "Task" [ 1493.184161] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.192278] env[62510]: DEBUG oslo_vmware.api [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Task: {'id': task-1768462, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.213078] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Creating linked-clone VM from snapshot {{(pid=62510) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1493.213387] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-d5df5c31-2ba5-4718-8b39-8795d6606c14 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.220882] env[62510]: DEBUG oslo_vmware.api [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Waiting for the task: (returnval){ [ 1493.220882] env[62510]: value = "task-1768463" [ 1493.220882] env[62510]: _type = "Task" [ 1493.220882] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.229165] env[62510]: DEBUG oslo_vmware.api [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768463, 'name': CloneVM_Task} progress is 0%. 
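The CreateSnapshot_Task earlier (task-1768457) followed by the CloneVM_Task here is the snapshot-then-linked-clone sequence used when capturing an instance image. A hedged outline of that sequence; `vm` and its methods are hypothetical, and `wait` is the same task-polling idea sketched above:

```python
# Hypothetical outline of CreateSnapshot_Task followed by a linked-clone
# CloneVM_Task, as seen in the log above. `vm` is an assumed object whose
# methods return task handles; `wait` blocks until a task finishes.
def snapshot_and_linked_clone(vm, wait, clone_name):
    snap_task = vm.create_snapshot(name="image-upload", memory=False, quiesce=False)
    snapshot = wait(snap_task)                  # CreateSnapshot_Task
    clone_task = vm.clone_from_snapshot(snapshot, name=clone_name, linked=True)
    return wait(clone_task)                     # CloneVM_Task -> clone VM reference
```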
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.288576] env[62510]: DEBUG oslo_concurrency.lockutils [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Releasing lock "refresh_cache-8e3cefa1-fab9-469e-8a32-31b4a8ecf4be" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1493.288970] env[62510]: DEBUG nova.compute.manager [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Instance network_info: |[{"id": "16130581-d9c2-41e9-8c94-f66f9c4f357c", "address": "fa:16:3e:04:5c:3b", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.62", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16130581-d9", "ovs_interfaceid": "16130581-d9c2-41e9-8c94-f66f9c4f357c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1493.289446] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:04:5c:3b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '28d04eee-6dbb-491a-a999-b659c799679d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '16130581-d9c2-41e9-8c94-f66f9c4f357c', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1493.297970] env[62510]: DEBUG oslo.service.loopingcall [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1493.298468] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1493.298468] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b281eab7-a788-468f-98dc-23f41e97e085 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.320576] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1493.320576] env[62510]: value = "task-1768464" [ 1493.320576] env[62510]: _type = "Task" [ 1493.320576] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.328960] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768464, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.549908] env[62510]: DEBUG oslo_vmware.api [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Task: {'id': task-1768460, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.629755] env[62510]: ERROR nova.scheduler.client.report [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [req-5f7909f5-7dfe-437b-87b5-7e10fda9657b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c3653102-341b-4ed1-8b1f-1abaf8aa3e56. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-5f7909f5-7dfe-437b-87b5-7e10fda9657b"}]} [ 1493.646815] env[62510]: DEBUG nova.scheduler.client.report [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Refreshing inventories for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1493.665338] env[62510]: DEBUG nova.scheduler.client.report [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Updating ProviderTree inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1493.665602] env[62510]: DEBUG nova.compute.provider_tree [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1493.670762] env[62510]: DEBUG nova.compute.manager [req-795bbaa7-b174-43d9-8ea5-1a56abda7a5a req-8765568e-376b-494b-9b19-7d30af6d06de service nova] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Received event network-changed-16130581-d9c2-41e9-8c94-f66f9c4f357c {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1493.670977] env[62510]: DEBUG nova.compute.manager [req-795bbaa7-b174-43d9-8ea5-1a56abda7a5a req-8765568e-376b-494b-9b19-7d30af6d06de service nova] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Refreshing instance network info cache due to event network-changed-16130581-d9c2-41e9-8c94-f66f9c4f357c. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1493.672039] env[62510]: DEBUG oslo_concurrency.lockutils [req-795bbaa7-b174-43d9-8ea5-1a56abda7a5a req-8765568e-376b-494b-9b19-7d30af6d06de service nova] Acquiring lock "refresh_cache-8e3cefa1-fab9-469e-8a32-31b4a8ecf4be" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1493.672039] env[62510]: DEBUG oslo_concurrency.lockutils [req-795bbaa7-b174-43d9-8ea5-1a56abda7a5a req-8765568e-376b-494b-9b19-7d30af6d06de service nova] Acquired lock "refresh_cache-8e3cefa1-fab9-469e-8a32-31b4a8ecf4be" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1493.672039] env[62510]: DEBUG nova.network.neutron [req-795bbaa7-b174-43d9-8ea5-1a56abda7a5a req-8765568e-376b-494b-9b19-7d30af6d06de service nova] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Refreshing network info cache for port 16130581-d9c2-41e9-8c94-f66f9c4f357c {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1493.681091] env[62510]: DEBUG nova.scheduler.client.report [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Refreshing aggregate associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, aggregates: None {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1493.694786] env[62510]: DEBUG oslo_vmware.api [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Task: {'id': task-1768462, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073922} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1493.695070] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1493.695918] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f19ac9d-6714-49b6-8d13-89c231c4792d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.720561] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Reconfiguring VM instance instance-0000001d to attach disk [datastore1] aca56820-5a06-43dd-9d98-25421f7ef6a6/aca56820-5a06-43dd-9d98-25421f7ef6a6.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1493.721505] env[62510]: DEBUG nova.scheduler.client.report [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Refreshing trait associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1493.723835] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7234b064-c5bd-4c78-b56b-a2fd04be353b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.748057] env[62510]: DEBUG oslo_vmware.api [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768463, 'name': CloneVM_Task} progress is 94%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.749497] env[62510]: DEBUG oslo_vmware.api [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Waiting for the task: (returnval){ [ 1493.749497] env[62510]: value = "task-1768465" [ 1493.749497] env[62510]: _type = "Task" [ 1493.749497] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.758071] env[62510]: DEBUG oslo_vmware.api [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Task: {'id': task-1768465, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.833732] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768464, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.906865] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7ad8f086-1223-4f78-8e94-748a6fcda1f6 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Acquiring lock "9a1a0428-8ccd-4614-8853-ef3eeec23d55" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1493.907187] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7ad8f086-1223-4f78-8e94-748a6fcda1f6 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Lock "9a1a0428-8ccd-4614-8853-ef3eeec23d55" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1493.907444] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7ad8f086-1223-4f78-8e94-748a6fcda1f6 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Acquiring lock "9a1a0428-8ccd-4614-8853-ef3eeec23d55-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1493.907647] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7ad8f086-1223-4f78-8e94-748a6fcda1f6 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Lock "9a1a0428-8ccd-4614-8853-ef3eeec23d55-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1493.907831] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7ad8f086-1223-4f78-8e94-748a6fcda1f6 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Lock "9a1a0428-8ccd-4614-8853-ef3eeec23d55-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1493.912257] env[62510]: INFO nova.compute.manager [None req-7ad8f086-1223-4f78-8e94-748a6fcda1f6 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Terminating instance [ 1494.049400] env[62510]: DEBUG oslo_vmware.api [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Task: {'id': task-1768460, 'name': PowerOnVM_Task, 'duration_secs': 1.078296} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.051806] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1494.051902] env[62510]: INFO nova.compute.manager [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Took 8.99 seconds to spawn the instance on the hypervisor. [ 1494.052071] env[62510]: DEBUG nova.compute.manager [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1494.052981] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64c8f033-69ae-4e3d-9651-2071ac97f168 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.133103] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e84501df-7708-4293-a107-8524f0fde6b2 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Acquiring lock "26b283b0-98b4-4a15-abe0-fbf97e1f49eb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1494.133331] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e84501df-7708-4293-a107-8524f0fde6b2 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Lock "26b283b0-98b4-4a15-abe0-fbf97e1f49eb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1494.133554] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e84501df-7708-4293-a107-8524f0fde6b2 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Acquiring lock "26b283b0-98b4-4a15-abe0-fbf97e1f49eb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1494.133738] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e84501df-7708-4293-a107-8524f0fde6b2 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Lock "26b283b0-98b4-4a15-abe0-fbf97e1f49eb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1494.133904] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e84501df-7708-4293-a107-8524f0fde6b2 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Lock 
"26b283b0-98b4-4a15-abe0-fbf97e1f49eb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1494.136199] env[62510]: INFO nova.compute.manager [None req-e84501df-7708-4293-a107-8524f0fde6b2 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Terminating instance [ 1494.209317] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13ef9fbf-5dfa-49cf-a407-bd7135863ffc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.217092] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c399fd3-3913-41ff-bc3b-587b419a675c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.252246] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58331f01-c194-48f6-80b6-151646f1c25d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.265130] env[62510]: DEBUG oslo_vmware.api [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768463, 'name': CloneVM_Task} progress is 94%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.269822] env[62510]: DEBUG oslo_vmware.api [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Task: {'id': task-1768465, 'name': ReconfigVM_Task, 'duration_secs': 0.298573} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.270201] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Reconfigured VM instance instance-0000001d to attach disk [datastore1] aca56820-5a06-43dd-9d98-25421f7ef6a6/aca56820-5a06-43dd-9d98-25421f7ef6a6.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1494.271857] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01d78d3d-ab4e-4bb6-a980-96c88910bdbd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.275794] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-57747a9c-0eb0-41e6-aaee-aa01d8c0b66b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.288204] env[62510]: DEBUG nova.compute.provider_tree [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1494.290762] env[62510]: DEBUG oslo_vmware.api [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Waiting for the task: (returnval){ [ 1494.290762] env[62510]: value = "task-1768466" [ 1494.290762] env[62510]: _type = "Task" [ 1494.290762] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.303148] env[62510]: DEBUG oslo_vmware.api [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Task: {'id': task-1768466, 'name': Rename_Task} progress is 10%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.331529] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768464, 'name': CreateVM_Task, 'duration_secs': 0.65094} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.331707] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1494.332363] env[62510]: DEBUG oslo_concurrency.lockutils [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1494.332538] env[62510]: DEBUG oslo_concurrency.lockutils [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1494.332907] env[62510]: DEBUG oslo_concurrency.lockutils [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1494.333570] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7aac4411-c7a7-4a5c-aad6-4c2facc5c172 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.340402] env[62510]: DEBUG oslo_vmware.api [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Waiting for the task: (returnval){ [ 1494.340402] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52ed79e3-a97c-6755-9d57-e00d33872372" [ 1494.340402] env[62510]: _type = "Task" [ 1494.340402] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.348292] env[62510]: DEBUG oslo_vmware.api [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52ed79e3-a97c-6755-9d57-e00d33872372, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.416670] env[62510]: DEBUG nova.compute.manager [None req-7ad8f086-1223-4f78-8e94-748a6fcda1f6 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1494.416838] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-7ad8f086-1223-4f78-8e94-748a6fcda1f6 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1494.417761] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b783b02-7bad-4b80-b86d-6fdf98ae296e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.425187] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ad8f086-1223-4f78-8e94-748a6fcda1f6 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1494.425517] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b35d2654-6249-46fc-be8d-0beb58db35d6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.433522] env[62510]: DEBUG oslo_vmware.api [None req-7ad8f086-1223-4f78-8e94-748a6fcda1f6 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Waiting for the task: (returnval){ [ 1494.433522] env[62510]: value = "task-1768467" [ 1494.433522] env[62510]: _type = "Task" [ 1494.433522] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.441990] env[62510]: DEBUG oslo_vmware.api [None req-7ad8f086-1223-4f78-8e94-748a6fcda1f6 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768467, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.471164] env[62510]: DEBUG nova.network.neutron [req-795bbaa7-b174-43d9-8ea5-1a56abda7a5a req-8765568e-376b-494b-9b19-7d30af6d06de service nova] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Updated VIF entry in instance network info cache for port 16130581-d9c2-41e9-8c94-f66f9c4f357c. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1494.471537] env[62510]: DEBUG nova.network.neutron [req-795bbaa7-b174-43d9-8ea5-1a56abda7a5a req-8765568e-376b-494b-9b19-7d30af6d06de service nova] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Updating instance_info_cache with network_info: [{"id": "16130581-d9c2-41e9-8c94-f66f9c4f357c", "address": "fa:16:3e:04:5c:3b", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.62", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16130581-d9", "ovs_interfaceid": "16130581-d9c2-41e9-8c94-f66f9c4f357c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1494.574453] env[62510]: INFO nova.compute.manager [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Took 41.04 seconds to build instance. [ 1494.639911] env[62510]: DEBUG nova.compute.manager [None req-e84501df-7708-4293-a107-8524f0fde6b2 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1494.640182] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e84501df-7708-4293-a107-8524f0fde6b2 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1494.641125] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-519af69e-b19a-466c-88b2-dadc61637328 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.651034] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e84501df-7708-4293-a107-8524f0fde6b2 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1494.651359] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bd29e96c-c88f-45e1-a284-208663842c22 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.657350] env[62510]: DEBUG oslo_vmware.api [None req-e84501df-7708-4293-a107-8524f0fde6b2 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Waiting for the task: (returnval){ [ 1494.657350] env[62510]: value = "task-1768468" [ 1494.657350] env[62510]: _type = "Task" [ 1494.657350] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.666072] env[62510]: DEBUG oslo_vmware.api [None req-e84501df-7708-4293-a107-8524f0fde6b2 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768468, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.759423] env[62510]: DEBUG oslo_vmware.api [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768463, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.793081] env[62510]: DEBUG nova.scheduler.client.report [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1494.809313] env[62510]: DEBUG oslo_vmware.api [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Task: {'id': task-1768466, 'name': Rename_Task, 'duration_secs': 0.157555} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.810259] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1494.810518] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8078990f-ecd8-4d66-bdeb-3f76e8179e92 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.822042] env[62510]: DEBUG oslo_vmware.api [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Waiting for the task: (returnval){ [ 1494.822042] env[62510]: value = "task-1768470" [ 1494.822042] env[62510]: _type = "Task" [ 1494.822042] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.838019] env[62510]: DEBUG oslo_vmware.api [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Task: {'id': task-1768470, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.851052] env[62510]: DEBUG oslo_vmware.api [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52ed79e3-a97c-6755-9d57-e00d33872372, 'name': SearchDatastore_Task, 'duration_secs': 0.010193} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.851386] env[62510]: DEBUG oslo_concurrency.lockutils [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1494.851718] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1494.851868] env[62510]: DEBUG oslo_concurrency.lockutils [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1494.852027] env[62510]: DEBUG oslo_concurrency.lockutils [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1494.852253] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1494.852525] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-83555442-9883-4d2a-8042-5ea9a18be67f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.861735] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1494.861974] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1494.862702] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3479aa59-5c1d-4c99-967f-e0101e11a8aa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.868116] env[62510]: DEBUG oslo_vmware.api [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Waiting for the task: (returnval){ [ 1494.868116] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52e7c2ea-b867-3428-071e-ea665d499a3f" [ 1494.868116] env[62510]: _type = "Task" [ 1494.868116] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.876290] env[62510]: DEBUG oslo_vmware.api [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52e7c2ea-b867-3428-071e-ea665d499a3f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.943349] env[62510]: DEBUG oslo_vmware.api [None req-7ad8f086-1223-4f78-8e94-748a6fcda1f6 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768467, 'name': PowerOffVM_Task, 'duration_secs': 0.308565} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.943547] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ad8f086-1223-4f78-8e94-748a6fcda1f6 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1494.943738] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-7ad8f086-1223-4f78-8e94-748a6fcda1f6 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1494.944116] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3a54c16f-4bdf-4cd5-a0a3-912b30eeab31 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.974300] env[62510]: DEBUG oslo_concurrency.lockutils [req-795bbaa7-b174-43d9-8ea5-1a56abda7a5a req-8765568e-376b-494b-9b19-7d30af6d06de service nova] Releasing lock "refresh_cache-8e3cefa1-fab9-469e-8a32-31b4a8ecf4be" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1495.019416] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-7ad8f086-1223-4f78-8e94-748a6fcda1f6 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1495.019634] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None 
req-7ad8f086-1223-4f78-8e94-748a6fcda1f6 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1495.019800] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ad8f086-1223-4f78-8e94-748a6fcda1f6 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Deleting the datastore file [datastore1] 9a1a0428-8ccd-4614-8853-ef3eeec23d55 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1495.020087] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2d101ce5-6667-4f09-94ec-001c66a2a9c1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.026649] env[62510]: DEBUG oslo_vmware.api [None req-7ad8f086-1223-4f78-8e94-748a6fcda1f6 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Waiting for the task: (returnval){ [ 1495.026649] env[62510]: value = "task-1768472" [ 1495.026649] env[62510]: _type = "Task" [ 1495.026649] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.035061] env[62510]: DEBUG oslo_vmware.api [None req-7ad8f086-1223-4f78-8e94-748a6fcda1f6 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768472, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.077567] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7c645a5a-7d32-48e0-b679-2fbd84d02256 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Lock "a040671e-941d-4406-81af-f2f7a4b690e4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.415s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1495.171196] env[62510]: DEBUG oslo_vmware.api [None req-e84501df-7708-4293-a107-8524f0fde6b2 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768468, 'name': PowerOffVM_Task, 'duration_secs': 0.222735} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.171453] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e84501df-7708-4293-a107-8524f0fde6b2 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1495.171623] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e84501df-7708-4293-a107-8524f0fde6b2 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1495.171877] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-01720aa6-ee04-46d9-9019-e9c6e02da2f6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.246571] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e84501df-7708-4293-a107-8524f0fde6b2 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1495.246849] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e84501df-7708-4293-a107-8524f0fde6b2 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1495.246984] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-e84501df-7708-4293-a107-8524f0fde6b2 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Deleting the datastore file [datastore1] 26b283b0-98b4-4a15-abe0-fbf97e1f49eb {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1495.247256] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-64a89975-9dd2-4046-9a93-a5f1b902a032 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.258562] env[62510]: DEBUG oslo_vmware.api [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768463, 'name': CloneVM_Task, 'duration_secs': 1.675627} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.260036] env[62510]: INFO nova.virt.vmwareapi.vmops [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Created linked-clone VM from snapshot [ 1495.260437] env[62510]: DEBUG oslo_vmware.api [None req-e84501df-7708-4293-a107-8524f0fde6b2 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Waiting for the task: (returnval){ [ 1495.260437] env[62510]: value = "task-1768474" [ 1495.260437] env[62510]: _type = "Task" [ 1495.260437] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.261245] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71cc9c1d-3ef4-4311-afb6-9f317da0b1e8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.276852] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Uploading image 14b98911-83eb-411e-8277-cc3f01ab3067 {{(pid=62510) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1495.288049] env[62510]: DEBUG oslo_vmware.api [None req-e84501df-7708-4293-a107-8524f0fde6b2 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768474, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.304638] env[62510]: DEBUG oslo_concurrency.lockutils [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.774s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1495.305309] env[62510]: DEBUG nova.compute.manager [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1495.312028] env[62510]: DEBUG oslo_concurrency.lockutils [None req-07db52b9-245e-484a-81e5-f30046c17e0f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.044s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1495.312294] env[62510]: DEBUG nova.objects.instance [None req-07db52b9-245e-484a-81e5-f30046c17e0f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Lazy-loading 'resources' on Instance uuid 35a98028-0fc6-4e13-b50d-5dacf205dbe5 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1495.321447] env[62510]: DEBUG oslo_vmware.rw_handles [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1495.321447] env[62510]: value = "vm-367281" [ 1495.321447] env[62510]: _type = "VirtualMachine" [ 1495.321447] env[62510]: }. {{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1495.321743] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-a49b1fc9-43a9-4d8d-9c0f-10ebc6803e52 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.337161] env[62510]: DEBUG oslo_vmware.api [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Task: {'id': task-1768470, 'name': PowerOnVM_Task, 'duration_secs': 0.493328} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.337455] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1495.337676] env[62510]: INFO nova.compute.manager [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Took 7.60 seconds to spawn the instance on the hypervisor. 
[ 1495.337849] env[62510]: DEBUG nova.compute.manager [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1495.338726] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d180a9e7-e047-45bd-b6d5-24ac6cd67c03 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.343439] env[62510]: DEBUG oslo_vmware.rw_handles [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Lease: (returnval){ [ 1495.343439] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52c3a2be-ed21-3f35-a89a-efacf8bd814a" [ 1495.343439] env[62510]: _type = "HttpNfcLease" [ 1495.343439] env[62510]: } obtained for exporting VM: (result){ [ 1495.343439] env[62510]: value = "vm-367281" [ 1495.343439] env[62510]: _type = "VirtualMachine" [ 1495.343439] env[62510]: }. {{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1495.343689] env[62510]: DEBUG oslo_vmware.api [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Waiting for the lease: (returnval){ [ 1495.343689] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52c3a2be-ed21-3f35-a89a-efacf8bd814a" [ 1495.343689] env[62510]: _type = "HttpNfcLease" [ 1495.343689] env[62510]: } to be ready. {{(pid=62510) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1495.355500] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1495.355500] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52c3a2be-ed21-3f35-a89a-efacf8bd814a" [ 1495.355500] env[62510]: _type = "HttpNfcLease" [ 1495.355500] env[62510]: } is ready. {{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1495.355857] env[62510]: DEBUG oslo_vmware.rw_handles [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1495.355857] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52c3a2be-ed21-3f35-a89a-efacf8bd814a" [ 1495.355857] env[62510]: _type = "HttpNfcLease" [ 1495.355857] env[62510]: }. {{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1495.356632] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c868917-6b1b-4190-ac86-697b162286a0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.364939] env[62510]: DEBUG oslo_vmware.rw_handles [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5251cd02-6de8-3837-af39-48016477d7a0/disk-0.vmdk from lease info. 
{{(pid=62510) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1495.365152] env[62510]: DEBUG oslo_vmware.rw_handles [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5251cd02-6de8-3837-af39-48016477d7a0/disk-0.vmdk for reading. {{(pid=62510) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1495.445641] env[62510]: DEBUG oslo_vmware.api [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52e7c2ea-b867-3428-071e-ea665d499a3f, 'name': SearchDatastore_Task, 'duration_secs': 0.009848} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.446918] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7746b6dd-dcfa-44b1-b3d7-67098947e89d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.453792] env[62510]: DEBUG oslo_vmware.api [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Waiting for the task: (returnval){ [ 1495.453792] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52bf89da-7da8-2c7b-b322-9a7f0004ef64" [ 1495.453792] env[62510]: _type = "Task" [ 1495.453792] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.462656] env[62510]: DEBUG oslo_vmware.api [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52bf89da-7da8-2c7b-b322-9a7f0004ef64, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.476339] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-7f818777-d50a-41fe-ad3a-168d31285713 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.539420] env[62510]: DEBUG oslo_vmware.api [None req-7ad8f086-1223-4f78-8e94-748a6fcda1f6 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768472, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.276591} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.539767] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ad8f086-1223-4f78-8e94-748a6fcda1f6 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1495.540249] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-7ad8f086-1223-4f78-8e94-748a6fcda1f6 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1495.540340] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-7ad8f086-1223-4f78-8e94-748a6fcda1f6 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1495.540520] env[62510]: INFO nova.compute.manager [None req-7ad8f086-1223-4f78-8e94-748a6fcda1f6 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1495.540846] env[62510]: DEBUG oslo.service.loopingcall [None req-7ad8f086-1223-4f78-8e94-748a6fcda1f6 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1495.541110] env[62510]: DEBUG nova.compute.manager [-] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1495.541240] env[62510]: DEBUG nova.network.neutron [-] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1495.580240] env[62510]: DEBUG nova.compute.manager [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1495.783710] env[62510]: DEBUG oslo_vmware.api [None req-e84501df-7708-4293-a107-8524f0fde6b2 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Task: {'id': task-1768474, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.351087} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.784751] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-e84501df-7708-4293-a107-8524f0fde6b2 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1495.785470] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e84501df-7708-4293-a107-8524f0fde6b2 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1495.786133] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e84501df-7708-4293-a107-8524f0fde6b2 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1495.786660] env[62510]: INFO nova.compute.manager [None req-e84501df-7708-4293-a107-8524f0fde6b2 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1495.788555] env[62510]: DEBUG oslo.service.loopingcall [None req-e84501df-7708-4293-a107-8524f0fde6b2 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1495.788954] env[62510]: DEBUG nova.compute.manager [-] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1495.791026] env[62510]: DEBUG nova.network.neutron [-] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1495.816417] env[62510]: DEBUG nova.compute.utils [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1495.823336] env[62510]: DEBUG nova.compute.manager [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1495.824198] env[62510]: DEBUG nova.network.neutron [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1495.869618] env[62510]: INFO nova.compute.manager [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Took 34.81 seconds to build instance. [ 1495.900673] env[62510]: DEBUG nova.policy [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'da50a3ca3cd14a109573a5f5da2ceef0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1c995c2427bd4f7da644d0a8df7d69da', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1495.941048] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0e3b24cb-60c3-474f-8b67-e42566499bf1 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Acquiring lock "a040671e-941d-4406-81af-f2f7a4b690e4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1495.941542] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0e3b24cb-60c3-474f-8b67-e42566499bf1 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Lock "a040671e-941d-4406-81af-f2f7a4b690e4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1495.941766] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0e3b24cb-60c3-474f-8b67-e42566499bf1 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Acquiring lock "a040671e-941d-4406-81af-f2f7a4b690e4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1495.942018] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0e3b24cb-60c3-474f-8b67-e42566499bf1 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Lock "a040671e-941d-4406-81af-f2f7a4b690e4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1495.942137] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0e3b24cb-60c3-474f-8b67-e42566499bf1 tempest-ServerDiagnosticsNegativeTest-564884613 
tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Lock "a040671e-941d-4406-81af-f2f7a4b690e4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1495.949071] env[62510]: INFO nova.compute.manager [None req-0e3b24cb-60c3-474f-8b67-e42566499bf1 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Terminating instance [ 1495.966868] env[62510]: DEBUG oslo_vmware.api [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52bf89da-7da8-2c7b-b322-9a7f0004ef64, 'name': SearchDatastore_Task, 'duration_secs': 0.010504} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.970241] env[62510]: DEBUG oslo_concurrency.lockutils [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1495.970589] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be/8e3cefa1-fab9-469e-8a32-31b4a8ecf4be.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1495.975467] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5ec4ec25-16d5-4564-9063-a46057234ccf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.985022] env[62510]: DEBUG oslo_vmware.api [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Waiting for the task: (returnval){ [ 1495.985022] env[62510]: value = "task-1768476" [ 1495.985022] env[62510]: _type = "Task" [ 1495.985022] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.999562] env[62510]: DEBUG oslo_vmware.api [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': task-1768476, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.069556] env[62510]: DEBUG nova.compute.manager [req-18ebc91f-13ff-416e-8ea2-f999902ab91d req-2d5ff4dc-71d9-4adc-9870-cd537e4a3bbe service nova] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Received event network-vif-deleted-7477b8b7-c766-4c58-a1dc-9db9f24198b6 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1496.069845] env[62510]: INFO nova.compute.manager [req-18ebc91f-13ff-416e-8ea2-f999902ab91d req-2d5ff4dc-71d9-4adc-9870-cd537e4a3bbe service nova] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Neutron deleted interface 7477b8b7-c766-4c58-a1dc-9db9f24198b6; detaching it from the instance and deleting it from the info cache [ 1496.070051] env[62510]: DEBUG nova.network.neutron [req-18ebc91f-13ff-416e-8ea2-f999902ab91d req-2d5ff4dc-71d9-4adc-9870-cd537e4a3bbe service nova] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1496.108434] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1496.269319] env[62510]: DEBUG nova.network.neutron [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Successfully created port: b93d3484-b909-4060-aef6-1f45f91f2325 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1496.320658] env[62510]: DEBUG nova.compute.manager [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1496.374820] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5694e3ad-a69d-4721-8d52-7505a24eee44 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Lock "aca56820-5a06-43dd-9d98-25421f7ef6a6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.994s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1496.453951] env[62510]: DEBUG nova.compute.manager [None req-0e3b24cb-60c3-474f-8b67-e42566499bf1 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1496.454390] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0e3b24cb-60c3-474f-8b67-e42566499bf1 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1496.454765] env[62510]: DEBUG nova.network.neutron [-] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1496.457625] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90453254-5c6f-4e56-992e-1300e59db469 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.468402] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e3b24cb-60c3-474f-8b67-e42566499bf1 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1496.472242] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-93147767-48d0-4dcf-a298-ecea26d277a1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.483246] env[62510]: DEBUG oslo_vmware.api [None req-0e3b24cb-60c3-474f-8b67-e42566499bf1 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Waiting for the task: (returnval){ [ 1496.483246] env[62510]: value = "task-1768477" [ 1496.483246] env[62510]: _type = "Task" [ 1496.483246] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.499548] env[62510]: DEBUG oslo_vmware.api [None req-0e3b24cb-60c3-474f-8b67-e42566499bf1 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Task: {'id': task-1768477, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.506028] env[62510]: DEBUG oslo_vmware.api [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': task-1768476, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.573485] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a23cac0d-9566-4afb-8627-9778bc8d8aa9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.589804] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b2378c6-4ad8-4a29-901e-047572a6db76 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.605186] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9596206-0042-4ea8-b703-ac2c6c3b032c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.613239] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c4fad9c-5bde-4514-8298-7d0947905c40 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.636309] env[62510]: DEBUG nova.compute.manager [req-18ebc91f-13ff-416e-8ea2-f999902ab91d req-2d5ff4dc-71d9-4adc-9870-cd537e4a3bbe service nova] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Detach interface failed, port_id=7477b8b7-c766-4c58-a1dc-9db9f24198b6, reason: Instance 9a1a0428-8ccd-4614-8853-ef3eeec23d55 could not be found. {{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1496.665145] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebbd01fb-d72f-424d-b05d-a7c4e0937b4a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.673808] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3c8c298-01c9-41dd-9ae7-bbabda79f3b2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.688987] env[62510]: DEBUG nova.compute.provider_tree [None req-07db52b9-245e-484a-81e5-f30046c17e0f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1496.810546] env[62510]: DEBUG nova.network.neutron [-] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1496.880218] env[62510]: DEBUG nova.compute.manager [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1496.957325] env[62510]: INFO nova.compute.manager [-] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Took 1.42 seconds to deallocate network for instance. 
[ 1496.995127] env[62510]: DEBUG oslo_vmware.api [None req-0e3b24cb-60c3-474f-8b67-e42566499bf1 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Task: {'id': task-1768477, 'name': PowerOffVM_Task, 'duration_secs': 0.360529} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.995918] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e3b24cb-60c3-474f-8b67-e42566499bf1 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1496.996214] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0e3b24cb-60c3-474f-8b67-e42566499bf1 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1496.996510] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-82774c23-6ba3-427f-ada9-6493933e0e0f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.002246] env[62510]: DEBUG oslo_vmware.api [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': task-1768476, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.647689} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.003049] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be/8e3cefa1-fab9-469e-8a32-31b4a8ecf4be.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1497.003226] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1497.004465] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-48babfb9-a189-4dd9-a45c-8ba417a94128 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.016742] env[62510]: DEBUG oslo_vmware.api [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Waiting for the task: (returnval){ [ 1497.016742] env[62510]: value = "task-1768479" [ 1497.016742] env[62510]: _type = "Task" [ 1497.016742] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.028707] env[62510]: DEBUG oslo_vmware.api [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': task-1768479, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.093802] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0e3b24cb-60c3-474f-8b67-e42566499bf1 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1497.094252] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0e3b24cb-60c3-474f-8b67-e42566499bf1 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1497.094597] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e3b24cb-60c3-474f-8b67-e42566499bf1 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Deleting the datastore file [datastore1] a040671e-941d-4406-81af-f2f7a4b690e4 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1497.095091] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9f014212-f2ed-4290-955d-65c7c74b6e6a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.102504] env[62510]: DEBUG oslo_vmware.api [None req-0e3b24cb-60c3-474f-8b67-e42566499bf1 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Waiting for the task: (returnval){ [ 1497.102504] env[62510]: value = "task-1768480" [ 1497.102504] env[62510]: _type = "Task" [ 1497.102504] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.113493] env[62510]: DEBUG oslo_vmware.api [None req-0e3b24cb-60c3-474f-8b67-e42566499bf1 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Task: {'id': task-1768480, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.193116] env[62510]: DEBUG nova.scheduler.client.report [None req-07db52b9-245e-484a-81e5-f30046c17e0f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1497.313621] env[62510]: INFO nova.compute.manager [-] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Took 1.52 seconds to deallocate network for instance. [ 1497.343072] env[62510]: DEBUG nova.compute.manager [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1497.366467] env[62510]: DEBUG nova.virt.hardware [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1497.366834] env[62510]: DEBUG nova.virt.hardware [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1497.367081] env[62510]: DEBUG nova.virt.hardware [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1497.367510] env[62510]: DEBUG nova.virt.hardware [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1497.367798] env[62510]: DEBUG nova.virt.hardware [None 
req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1497.368127] env[62510]: DEBUG nova.virt.hardware [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1497.368455] env[62510]: DEBUG nova.virt.hardware [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1497.368624] env[62510]: DEBUG nova.virt.hardware [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1497.368793] env[62510]: DEBUG nova.virt.hardware [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1497.368955] env[62510]: DEBUG nova.virt.hardware [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1497.369144] env[62510]: DEBUG nova.virt.hardware [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1497.370371] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-703ce678-44d0-4a91-9f27-22ea46bebea1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.379701] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f749fe3c-0f4a-49bd-9cc5-f89e3f7161d7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.413552] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1497.463525] env[62510]: DEBUG oslo_concurrency.lockutils [None 
req-7ad8f086-1223-4f78-8e94-748a6fcda1f6 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1497.513987] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6b4f784c-aa5c-4d58-a36e-7b0bfe953359 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Acquiring lock "aca56820-5a06-43dd-9d98-25421f7ef6a6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1497.514370] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6b4f784c-aa5c-4d58-a36e-7b0bfe953359 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Lock "aca56820-5a06-43dd-9d98-25421f7ef6a6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1497.514647] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6b4f784c-aa5c-4d58-a36e-7b0bfe953359 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Acquiring lock "aca56820-5a06-43dd-9d98-25421f7ef6a6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1497.514838] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6b4f784c-aa5c-4d58-a36e-7b0bfe953359 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Lock "aca56820-5a06-43dd-9d98-25421f7ef6a6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1497.515088] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6b4f784c-aa5c-4d58-a36e-7b0bfe953359 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Lock "aca56820-5a06-43dd-9d98-25421f7ef6a6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1497.517378] env[62510]: INFO nova.compute.manager [None req-6b4f784c-aa5c-4d58-a36e-7b0bfe953359 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Terminating instance [ 1497.529456] env[62510]: DEBUG oslo_vmware.api [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': task-1768479, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075192} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.529791] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1497.530647] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9c6b7d9-a447-4c22-90fa-d93e414e4ac5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.555389] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Reconfiguring VM instance instance-0000001f to attach disk [datastore1] 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be/8e3cefa1-fab9-469e-8a32-31b4a8ecf4be.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1497.556151] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-76ccb152-5a2b-4696-9dd4-15051ad3ae0c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.578194] env[62510]: DEBUG oslo_vmware.api [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Waiting for the task: (returnval){ [ 1497.578194] env[62510]: value = "task-1768481" [ 1497.578194] env[62510]: _type = "Task" [ 1497.578194] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.587211] env[62510]: DEBUG oslo_vmware.api [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': task-1768481, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.612329] env[62510]: DEBUG oslo_vmware.api [None req-0e3b24cb-60c3-474f-8b67-e42566499bf1 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Task: {'id': task-1768480, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.441419} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.612681] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e3b24cb-60c3-474f-8b67-e42566499bf1 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1497.612876] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0e3b24cb-60c3-474f-8b67-e42566499bf1 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1497.613275] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0e3b24cb-60c3-474f-8b67-e42566499bf1 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1497.613355] env[62510]: INFO nova.compute.manager [None req-0e3b24cb-60c3-474f-8b67-e42566499bf1 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1497.613836] env[62510]: DEBUG oslo.service.loopingcall [None req-0e3b24cb-60c3-474f-8b67-e42566499bf1 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1497.614263] env[62510]: DEBUG nova.compute.manager [-] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1497.614375] env[62510]: DEBUG nova.network.neutron [-] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1497.698854] env[62510]: DEBUG oslo_concurrency.lockutils [None req-07db52b9-245e-484a-81e5-f30046c17e0f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.387s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1497.701785] env[62510]: DEBUG oslo_concurrency.lockutils [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.691s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1497.704326] env[62510]: INFO nova.compute.claims [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1497.722125] env[62510]: INFO nova.scheduler.client.report [None req-07db52b9-245e-484a-81e5-f30046c17e0f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Deleted allocations for instance 35a98028-0fc6-4e13-b50d-5dacf205dbe5 [ 1497.822421] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e84501df-7708-4293-a107-8524f0fde6b2 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1497.947975] env[62510]: DEBUG nova.network.neutron [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Successfully updated port: b93d3484-b909-4060-aef6-1f45f91f2325 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1498.024903] env[62510]: DEBUG nova.compute.manager [None req-6b4f784c-aa5c-4d58-a36e-7b0bfe953359 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1498.025149] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-6b4f784c-aa5c-4d58-a36e-7b0bfe953359 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1498.026286] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cba8fa73-c9ca-495d-832e-7a204d424581 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.037337] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b4f784c-aa5c-4d58-a36e-7b0bfe953359 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1498.037676] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d2eaadcc-6375-4d11-af64-0f4fcfa9aa80 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.048434] env[62510]: DEBUG oslo_vmware.api [None req-6b4f784c-aa5c-4d58-a36e-7b0bfe953359 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Waiting for the task: (returnval){ [ 1498.048434] env[62510]: value = "task-1768482" [ 1498.048434] env[62510]: _type = "Task" [ 1498.048434] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.057912] env[62510]: DEBUG oslo_vmware.api [None req-6b4f784c-aa5c-4d58-a36e-7b0bfe953359 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Task: {'id': task-1768482, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.063488] env[62510]: DEBUG nova.compute.manager [req-8928001b-b789-4468-a058-a77ec42db6aa req-602cfca2-438b-4eac-9f45-232e44768bec service nova] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Received event network-vif-plugged-b93d3484-b909-4060-aef6-1f45f91f2325 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1498.063767] env[62510]: DEBUG oslo_concurrency.lockutils [req-8928001b-b789-4468-a058-a77ec42db6aa req-602cfca2-438b-4eac-9f45-232e44768bec service nova] Acquiring lock "b004fba7-13e0-40f0-827d-8d09b7717176-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1498.063997] env[62510]: DEBUG oslo_concurrency.lockutils [req-8928001b-b789-4468-a058-a77ec42db6aa req-602cfca2-438b-4eac-9f45-232e44768bec service nova] Lock "b004fba7-13e0-40f0-827d-8d09b7717176-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1498.064178] env[62510]: DEBUG oslo_concurrency.lockutils [req-8928001b-b789-4468-a058-a77ec42db6aa req-602cfca2-438b-4eac-9f45-232e44768bec service nova] Lock "b004fba7-13e0-40f0-827d-8d09b7717176-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1498.064674] env[62510]: DEBUG nova.compute.manager [req-8928001b-b789-4468-a058-a77ec42db6aa req-602cfca2-438b-4eac-9f45-232e44768bec service nova] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] No waiting events found dispatching network-vif-plugged-b93d3484-b909-4060-aef6-1f45f91f2325 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1498.065181] env[62510]: WARNING nova.compute.manager [req-8928001b-b789-4468-a058-a77ec42db6aa req-602cfca2-438b-4eac-9f45-232e44768bec service nova] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Received unexpected event network-vif-plugged-b93d3484-b909-4060-aef6-1f45f91f2325 for instance with vm_state building and task_state spawning. [ 1498.093334] env[62510]: DEBUG oslo_vmware.api [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': task-1768481, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.231980] env[62510]: DEBUG oslo_concurrency.lockutils [None req-07db52b9-245e-484a-81e5-f30046c17e0f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Lock "35a98028-0fc6-4e13-b50d-5dacf205dbe5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.891s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1498.435954] env[62510]: DEBUG nova.compute.manager [req-308df72e-f444-4697-baf7-82e59fb34b0a req-fc6de7b1-2f18-4ac3-a434-42fe68af634b service nova] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Received event network-vif-deleted-909eb33f-63c2-4175-9250-a6557ad136f0 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1498.443454] env[62510]: DEBUG oslo_concurrency.lockutils [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Acquiring lock "568ce58c-9ce5-4b40-988f-f31d8e0c376d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1498.444134] env[62510]: DEBUG oslo_concurrency.lockutils [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Lock "568ce58c-9ce5-4b40-988f-f31d8e0c376d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1498.454021] env[62510]: DEBUG oslo_concurrency.lockutils [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Acquiring lock "refresh_cache-b004fba7-13e0-40f0-827d-8d09b7717176" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1498.454021] env[62510]: DEBUG oslo_concurrency.lockutils [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Acquired lock "refresh_cache-b004fba7-13e0-40f0-827d-8d09b7717176" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1498.454021] env[62510]: DEBUG nova.network.neutron [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1498.560369] env[62510]: DEBUG oslo_vmware.api [None req-6b4f784c-aa5c-4d58-a36e-7b0bfe953359 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Task: {'id': task-1768482, 'name': PowerOffVM_Task, 'duration_secs': 0.2542} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.560678] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b4f784c-aa5c-4d58-a36e-7b0bfe953359 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1498.560854] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-6b4f784c-aa5c-4d58-a36e-7b0bfe953359 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1498.561701] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c390e56f-3a82-4292-8095-927591f6e666 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.589479] env[62510]: DEBUG oslo_vmware.api [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': task-1768481, 'name': ReconfigVM_Task, 'duration_secs': 0.661186} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.589789] env[62510]: DEBUG nova.network.neutron [-] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1498.591104] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Reconfigured VM instance instance-0000001f to attach disk [datastore1] 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be/8e3cefa1-fab9-469e-8a32-31b4a8ecf4be.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1498.591775] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4597e94a-202e-47a3-b32d-cf4640ff81ab {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.599416] env[62510]: DEBUG oslo_vmware.api [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Waiting for the task: (returnval){ [ 1498.599416] env[62510]: value = "task-1768484" [ 1498.599416] env[62510]: _type = "Task" [ 1498.599416] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.611040] env[62510]: DEBUG oslo_vmware.api [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': task-1768484, 'name': Rename_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.661236] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-6b4f784c-aa5c-4d58-a36e-7b0bfe953359 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1498.661236] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-6b4f784c-aa5c-4d58-a36e-7b0bfe953359 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1498.661236] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b4f784c-aa5c-4d58-a36e-7b0bfe953359 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Deleting the datastore file [datastore1] aca56820-5a06-43dd-9d98-25421f7ef6a6 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1498.661504] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c4d25dda-a446-428b-b749-a8879329e3d0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.669759] env[62510]: DEBUG oslo_vmware.api [None req-6b4f784c-aa5c-4d58-a36e-7b0bfe953359 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Waiting for the task: (returnval){ [ 1498.669759] env[62510]: value = "task-1768485" [ 1498.669759] env[62510]: _type = "Task" [ 1498.669759] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.679635] env[62510]: DEBUG oslo_vmware.api [None req-6b4f784c-aa5c-4d58-a36e-7b0bfe953359 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Task: {'id': task-1768485, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.999862] env[62510]: DEBUG nova.network.neutron [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1499.095294] env[62510]: INFO nova.compute.manager [-] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Took 1.48 seconds to deallocate network for instance. [ 1499.114441] env[62510]: DEBUG oslo_vmware.api [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': task-1768484, 'name': Rename_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.190704] env[62510]: DEBUG oslo_vmware.api [None req-6b4f784c-aa5c-4d58-a36e-7b0bfe953359 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Task: {'id': task-1768485, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.496348} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.194725] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b4f784c-aa5c-4d58-a36e-7b0bfe953359 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1499.195082] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-6b4f784c-aa5c-4d58-a36e-7b0bfe953359 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1499.195380] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-6b4f784c-aa5c-4d58-a36e-7b0bfe953359 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1499.195679] env[62510]: INFO nova.compute.manager [None req-6b4f784c-aa5c-4d58-a36e-7b0bfe953359 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1499.196053] env[62510]: DEBUG oslo.service.loopingcall [None req-6b4f784c-aa5c-4d58-a36e-7b0bfe953359 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1499.196675] env[62510]: DEBUG nova.compute.manager [-] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1499.196823] env[62510]: DEBUG nova.network.neutron [-] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1499.210787] env[62510]: DEBUG oslo_concurrency.lockutils [None req-fc5070c4-52fc-4c51-9438-de5c343b920f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Acquiring lock "75e06a24-b96c-4a42-bc2d-b0b960e3301a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1499.213582] env[62510]: DEBUG oslo_concurrency.lockutils [None req-fc5070c4-52fc-4c51-9438-de5c343b920f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Lock "75e06a24-b96c-4a42-bc2d-b0b960e3301a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1499.213582] env[62510]: DEBUG oslo_concurrency.lockutils [None req-fc5070c4-52fc-4c51-9438-de5c343b920f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Acquiring lock "75e06a24-b96c-4a42-bc2d-b0b960e3301a-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1499.213582] env[62510]: DEBUG oslo_concurrency.lockutils [None req-fc5070c4-52fc-4c51-9438-de5c343b920f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Lock "75e06a24-b96c-4a42-bc2d-b0b960e3301a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1499.213582] env[62510]: DEBUG oslo_concurrency.lockutils [None req-fc5070c4-52fc-4c51-9438-de5c343b920f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Lock "75e06a24-b96c-4a42-bc2d-b0b960e3301a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1499.215707] env[62510]: INFO nova.compute.manager [None req-fc5070c4-52fc-4c51-9438-de5c343b920f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Terminating instance [ 1499.263062] env[62510]: DEBUG nova.network.neutron [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Updating instance_info_cache with network_info: [{"id": "b93d3484-b909-4060-aef6-1f45f91f2325", "address": "fa:16:3e:db:60:53", "network": {"id": "457a7f06-5f1c-485e-8589-43d0e40d3fc5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2119303260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c995c2427bd4f7da644d0a8df7d69da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2153f70-3d14-42ab-8bb3-be78296dd3b8", "external-id": "nsx-vlan-transportzone-532", "segmentation_id": 532, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb93d3484-b9", "ovs_interfaceid": "b93d3484-b909-4060-aef6-1f45f91f2325", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1499.379414] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7d637c1-9f53-4d7f-a8c1-d8350d58ea6e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.388829] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c44e4c5f-8738-4357-a37b-d08ddb2c44bf {{(pid=62510) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.422583] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-249e54aa-a6ab-4f58-a858-b43ab7bff497 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.430380] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08f45bb9-7cdf-4d67-bd46-c0aaae18c295 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.446462] env[62510]: DEBUG nova.compute.provider_tree [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1499.607149] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0e3b24cb-60c3-474f-8b67-e42566499bf1 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1499.613321] env[62510]: DEBUG oslo_vmware.api [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': task-1768484, 'name': Rename_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.723168] env[62510]: DEBUG nova.compute.manager [None req-fc5070c4-52fc-4c51-9438-de5c343b920f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1499.723391] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-fc5070c4-52fc-4c51-9438-de5c343b920f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1499.724314] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf1ef0c8-4be5-4c7f-b6db-a64e8614c944 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.732692] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc5070c4-52fc-4c51-9438-de5c343b920f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1499.732954] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f1df61eb-4a0d-4f92-b38c-ceb21bc1d607 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.739739] env[62510]: DEBUG oslo_vmware.api [None req-fc5070c4-52fc-4c51-9438-de5c343b920f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Waiting for the task: (returnval){ [ 1499.739739] env[62510]: value = "task-1768486" [ 1499.739739] env[62510]: _type = "Task" [ 1499.739739] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.747765] env[62510]: DEBUG oslo_vmware.api [None req-fc5070c4-52fc-4c51-9438-de5c343b920f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768486, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.768014] env[62510]: DEBUG oslo_concurrency.lockutils [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Releasing lock "refresh_cache-b004fba7-13e0-40f0-827d-8d09b7717176" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1499.768370] env[62510]: DEBUG nova.compute.manager [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Instance network_info: |[{"id": "b93d3484-b909-4060-aef6-1f45f91f2325", "address": "fa:16:3e:db:60:53", "network": {"id": "457a7f06-5f1c-485e-8589-43d0e40d3fc5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2119303260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c995c2427bd4f7da644d0a8df7d69da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2153f70-3d14-42ab-8bb3-be78296dd3b8", "external-id": "nsx-vlan-transportzone-532", "segmentation_id": 532, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb93d3484-b9", "ovs_interfaceid": "b93d3484-b909-4060-aef6-1f45f91f2325", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1499.768870] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:db:60:53', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e2153f70-3d14-42ab-8bb3-be78296dd3b8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b93d3484-b909-4060-aef6-1f45f91f2325', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1499.777765] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Creating folder: Project (1c995c2427bd4f7da644d0a8df7d69da). Parent ref: group-v367197. 
{{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1499.778093] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3a1e9176-843f-406f-96f9-ef3cfd7d2923 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.788720] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Created folder: Project (1c995c2427bd4f7da644d0a8df7d69da) in parent group-v367197. [ 1499.788960] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Creating folder: Instances. Parent ref: group-v367283. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1499.789229] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d42177d1-6303-441d-98c8-e346b66cf132 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.800555] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Created folder: Instances in parent group-v367283. [ 1499.800555] env[62510]: DEBUG oslo.service.loopingcall [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1499.800555] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1499.800749] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ddfa6da0-1c8e-4df9-b70b-5fc3d4dcb5db {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.823479] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1499.823479] env[62510]: value = "task-1768489" [ 1499.823479] env[62510]: _type = "Task" [ 1499.823479] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.834067] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768489, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.950832] env[62510]: DEBUG nova.scheduler.client.report [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1499.997507] env[62510]: DEBUG nova.network.neutron [-] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1500.104302] env[62510]: DEBUG nova.compute.manager [req-d30a083f-a91b-452e-b678-c1b98cf77d96 req-ba9db0a2-ff68-40db-926d-90da0ce95296 service nova] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Received event network-changed-b93d3484-b909-4060-aef6-1f45f91f2325 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1500.104497] env[62510]: DEBUG nova.compute.manager [req-d30a083f-a91b-452e-b678-c1b98cf77d96 req-ba9db0a2-ff68-40db-926d-90da0ce95296 service nova] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Refreshing instance network info cache due to event network-changed-b93d3484-b909-4060-aef6-1f45f91f2325. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1500.104757] env[62510]: DEBUG oslo_concurrency.lockutils [req-d30a083f-a91b-452e-b678-c1b98cf77d96 req-ba9db0a2-ff68-40db-926d-90da0ce95296 service nova] Acquiring lock "refresh_cache-b004fba7-13e0-40f0-827d-8d09b7717176" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1500.104905] env[62510]: DEBUG oslo_concurrency.lockutils [req-d30a083f-a91b-452e-b678-c1b98cf77d96 req-ba9db0a2-ff68-40db-926d-90da0ce95296 service nova] Acquired lock "refresh_cache-b004fba7-13e0-40f0-827d-8d09b7717176" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1500.105255] env[62510]: DEBUG nova.network.neutron [req-d30a083f-a91b-452e-b678-c1b98cf77d96 req-ba9db0a2-ff68-40db-926d-90da0ce95296 service nova] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Refreshing network info cache for port b93d3484-b909-4060-aef6-1f45f91f2325 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1500.117681] env[62510]: DEBUG oslo_vmware.api [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': task-1768484, 'name': Rename_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.250596] env[62510]: DEBUG oslo_vmware.api [None req-fc5070c4-52fc-4c51-9438-de5c343b920f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768486, 'name': PowerOffVM_Task, 'duration_secs': 0.193502} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.250943] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc5070c4-52fc-4c51-9438-de5c343b920f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1500.251130] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-fc5070c4-52fc-4c51-9438-de5c343b920f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1500.251386] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f00412e6-c6c4-4d4a-8c58-470f3cd7d9d7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.331058] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-fc5070c4-52fc-4c51-9438-de5c343b920f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1500.331058] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-fc5070c4-52fc-4c51-9438-de5c343b920f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1500.331486] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc5070c4-52fc-4c51-9438-de5c343b920f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Deleting the datastore file [datastore1] 75e06a24-b96c-4a42-bc2d-b0b960e3301a {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1500.331990] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ece01174-9155-4b92-abeb-cf22f8db5db5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.337567] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768489, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.342902] env[62510]: DEBUG oslo_vmware.api [None req-fc5070c4-52fc-4c51-9438-de5c343b920f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Waiting for the task: (returnval){ [ 1500.342902] env[62510]: value = "task-1768491" [ 1500.342902] env[62510]: _type = "Task" [ 1500.342902] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1500.351439] env[62510]: DEBUG oslo_vmware.api [None req-fc5070c4-52fc-4c51-9438-de5c343b920f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768491, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.457040] env[62510]: DEBUG oslo_concurrency.lockutils [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.755s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1500.458166] env[62510]: DEBUG nova.compute.manager [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1500.460297] env[62510]: DEBUG oslo_concurrency.lockutils [None req-568d4908-b200-4728-8122-5c64ebc394d9 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.219s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1500.460542] env[62510]: DEBUG nova.objects.instance [None req-568d4908-b200-4728-8122-5c64ebc394d9 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Lazy-loading 'resources' on Instance uuid 7490c825-dfd5-409c-9fd6-0e78643338fb {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1500.479591] env[62510]: DEBUG nova.compute.manager [req-3d7c397a-cabb-41a7-a7a2-792665d9ca05 req-11e3c703-be2f-4fcd-9f5b-692e5189c6d7 service nova] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Received event network-vif-deleted-152cd402-ef01-4f90-a464-27b8e3ac0650 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1500.500101] env[62510]: INFO nova.compute.manager [-] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Took 1.30 seconds to deallocate network for instance. [ 1500.620738] env[62510]: DEBUG oslo_vmware.api [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': task-1768484, 'name': Rename_Task, 'duration_secs': 1.867742} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.621056] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1500.621307] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b75797f1-f654-40bc-a239-86c999b3990f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.627652] env[62510]: DEBUG oslo_vmware.api [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Waiting for the task: (returnval){ [ 1500.627652] env[62510]: value = "task-1768492" [ 1500.627652] env[62510]: _type = "Task" [ 1500.627652] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1500.635461] env[62510]: DEBUG oslo_vmware.api [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': task-1768492, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.836668] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768489, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.853687] env[62510]: DEBUG oslo_vmware.api [None req-fc5070c4-52fc-4c51-9438-de5c343b920f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768491, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.326495} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.854119] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc5070c4-52fc-4c51-9438-de5c343b920f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1500.854424] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-fc5070c4-52fc-4c51-9438-de5c343b920f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1500.857027] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-fc5070c4-52fc-4c51-9438-de5c343b920f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1500.857027] env[62510]: INFO nova.compute.manager [None req-fc5070c4-52fc-4c51-9438-de5c343b920f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1500.857027] env[62510]: DEBUG oslo.service.loopingcall [None req-fc5070c4-52fc-4c51-9438-de5c343b920f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1500.857027] env[62510]: DEBUG nova.compute.manager [-] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1500.857027] env[62510]: DEBUG nova.network.neutron [-] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1500.964194] env[62510]: DEBUG nova.compute.utils [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1500.972526] env[62510]: DEBUG nova.compute.manager [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1500.972963] env[62510]: DEBUG nova.network.neutron [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1501.006453] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6b4f784c-aa5c-4d58-a36e-7b0bfe953359 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1501.013444] env[62510]: DEBUG nova.network.neutron [req-d30a083f-a91b-452e-b678-c1b98cf77d96 req-ba9db0a2-ff68-40db-926d-90da0ce95296 service nova] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Updated VIF entry in instance network info cache for port b93d3484-b909-4060-aef6-1f45f91f2325. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1501.013989] env[62510]: DEBUG nova.network.neutron [req-d30a083f-a91b-452e-b678-c1b98cf77d96 req-ba9db0a2-ff68-40db-926d-90da0ce95296 service nova] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Updating instance_info_cache with network_info: [{"id": "b93d3484-b909-4060-aef6-1f45f91f2325", "address": "fa:16:3e:db:60:53", "network": {"id": "457a7f06-5f1c-485e-8589-43d0e40d3fc5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2119303260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c995c2427bd4f7da644d0a8df7d69da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2153f70-3d14-42ab-8bb3-be78296dd3b8", "external-id": "nsx-vlan-transportzone-532", "segmentation_id": 532, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb93d3484-b9", "ovs_interfaceid": "b93d3484-b909-4060-aef6-1f45f91f2325", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1501.081641] env[62510]: DEBUG nova.policy [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a406a2bf0ccd4b99ba7dcb359a9b640e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e144c0bd2d124193a65ad53de8c43039', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1501.138927] env[62510]: DEBUG 
oslo_vmware.api [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': task-1768492, 'name': PowerOnVM_Task} progress is 78%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.335125] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768489, 'name': CreateVM_Task, 'duration_secs': 1.040794} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.337640] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1501.338501] env[62510]: DEBUG oslo_concurrency.lockutils [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1501.338668] env[62510]: DEBUG oslo_concurrency.lockutils [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1501.338974] env[62510]: DEBUG oslo_concurrency.lockutils [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1501.339247] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a7cb70e-c42f-4220-84e9-2295993c8bb7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.344273] env[62510]: DEBUG oslo_vmware.api [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Waiting for the task: (returnval){ [ 1501.344273] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5256e33c-428d-5afc-db61-95b24ff77df8" [ 1501.344273] env[62510]: _type = "Task" [ 1501.344273] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.354316] env[62510]: DEBUG oslo_vmware.api [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5256e33c-428d-5afc-db61-95b24ff77df8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.473770] env[62510]: DEBUG nova.compute.manager [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1501.517357] env[62510]: DEBUG oslo_concurrency.lockutils [req-d30a083f-a91b-452e-b678-c1b98cf77d96 req-ba9db0a2-ff68-40db-926d-90da0ce95296 service nova] Releasing lock "refresh_cache-b004fba7-13e0-40f0-827d-8d09b7717176" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1501.517684] env[62510]: DEBUG nova.compute.manager [req-d30a083f-a91b-452e-b678-c1b98cf77d96 req-ba9db0a2-ff68-40db-926d-90da0ce95296 service nova] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Received event network-vif-deleted-66e8c35a-f1f1-4dfc-94de-fd2781f02eae {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1501.592394] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-727bafa1-2e08-4448-87e0-a9704009e69c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.601941] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f3a52ed-5250-4177-bc8d-bce2b66329b6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.644787] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f68aa9b-1b18-4ee4-85dd-352079394143 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.655393] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce25917a-5a68-4b95-b081-26f61daf1e09 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.659238] env[62510]: DEBUG oslo_vmware.api [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': task-1768492, 'name': PowerOnVM_Task, 'duration_secs': 0.835148} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.659500] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1501.659705] env[62510]: INFO nova.compute.manager [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Took 11.29 seconds to spawn the instance on the hypervisor. 
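The repeated "Waiting for the task", "progress is N%", and "completed successfully" entries above are produced by oslo_vmware's task polling (_poll_task in api.py). The following is a minimal illustrative sketch of that polling pattern, for orientation only: the get_task_info callable and the 0.5 s interval are hypothetical stand-ins, and in the real service oslo_vmware's VMwareAPISession.wait_for_task performs this work rather than the function below.

    # Illustrative sketch only: a simplified poll loop behind the
    # "Task: {'id': ..., 'name': ...} progress is N%" entries in this log.
    import time

    POLL_INTERVAL = 0.5  # seconds between polls (assumed value)

    def wait_for_task(session, task_ref, get_task_info):
        """Poll a vCenter task reference until it reaches a terminal state."""
        while True:
            info = get_task_info(session, task_ref)  # hypothetical helper
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                raise RuntimeError(info.error)
            # 'queued' or 'running': report progress and poll again,
            # mirroring the "progress is N%" lines above.
            print("Task %s progress is %s%%" % (task_ref, info.progress or 0))
            time.sleep(POLL_INTERVAL)
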
[ 1501.659884] env[62510]: DEBUG nova.compute.manager [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1501.660967] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6acabdb5-696d-483c-85fe-c2c5dc26f392 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.671396] env[62510]: DEBUG nova.compute.provider_tree [None req-568d4908-b200-4728-8122-5c64ebc394d9 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1501.854324] env[62510]: DEBUG oslo_vmware.api [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5256e33c-428d-5afc-db61-95b24ff77df8, 'name': SearchDatastore_Task, 'duration_secs': 0.04483} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.854635] env[62510]: DEBUG oslo_concurrency.lockutils [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1501.854881] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1501.855123] env[62510]: DEBUG oslo_concurrency.lockutils [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1501.855270] env[62510]: DEBUG oslo_concurrency.lockutils [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1501.855449] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) 
mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1501.855740] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6f8a656b-748e-43e2-b572-f3e59c1274d1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.871462] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1501.871628] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1501.872387] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0b2f7e7-e256-4637-bcf4-e1677fe90c35 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.877917] env[62510]: DEBUG oslo_vmware.api [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Waiting for the task: (returnval){ [ 1501.877917] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5271016b-71b5-4e13-2204-47b27049721f" [ 1501.877917] env[62510]: _type = "Task" [ 1501.877917] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.882221] env[62510]: DEBUG nova.network.neutron [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Successfully created port: 7a3daaec-85e6-418b-a6c1-a74dcb3b41ac {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1501.891464] env[62510]: DEBUG oslo_vmware.api [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5271016b-71b5-4e13-2204-47b27049721f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.030057] env[62510]: DEBUG nova.network.neutron [-] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1502.183155] env[62510]: DEBUG nova.scheduler.client.report [None req-568d4908-b200-4728-8122-5c64ebc394d9 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1502.191829] env[62510]: INFO nova.compute.manager [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Took 38.86 seconds to build instance. [ 1502.392113] env[62510]: DEBUG oslo_vmware.api [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5271016b-71b5-4e13-2204-47b27049721f, 'name': SearchDatastore_Task, 'duration_secs': 0.040517} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1502.392113] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6cafd1aa-470d-4b18-8a13-643b5689754d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.398612] env[62510]: DEBUG oslo_vmware.api [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Waiting for the task: (returnval){ [ 1502.398612] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]521a4a89-f050-c63f-372d-d5c28e306496" [ 1502.398612] env[62510]: _type = "Task" [ 1502.398612] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.408164] env[62510]: DEBUG oslo_vmware.api [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]521a4a89-f050-c63f-372d-d5c28e306496, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.485494] env[62510]: DEBUG nova.compute.manager [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1502.518519] env[62510]: DEBUG nova.virt.hardware [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1502.518863] env[62510]: DEBUG nova.virt.hardware [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1502.519086] env[62510]: DEBUG nova.virt.hardware [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1502.519338] env[62510]: DEBUG nova.virt.hardware [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1502.519609] env[62510]: DEBUG nova.virt.hardware [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1502.519820] env[62510]: DEBUG nova.virt.hardware [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1502.520603] env[62510]: DEBUG nova.virt.hardware [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1502.520879] env[62510]: DEBUG nova.virt.hardware [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1502.521273] env[62510]: DEBUG 
nova.virt.hardware [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1502.521615] env[62510]: DEBUG nova.virt.hardware [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1502.521898] env[62510]: DEBUG nova.virt.hardware [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1502.524402] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2029b19d-4982-4ed0-9559-400033ccc865 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.528879] env[62510]: DEBUG nova.compute.manager [req-b87537ca-23c9-455e-b8d9-b7f777f3cc04 req-2e7223a4-c060-43ea-8f4a-83edbaf907ab service nova] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Received event network-vif-deleted-6faa955b-378f-4f0d-9181-22e9295cf131 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1502.531364] env[62510]: INFO nova.compute.manager [-] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Took 1.68 seconds to deallocate network for instance. 
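The nova.virt.hardware entries above (flavor and image limits 0:0:0, "Build topologies for 1 vcpu(s) 1:1:1", and a single VirtCPUTopology(cores=1,sockets=1,threads=1)) record the CPU-topology search for the 1-vCPU m1.nano flavor. The sketch below is an illustration of that kind of enumeration, not Nova's implementation; the 65536 defaults mirror the "limits were sockets=65536, cores=65536, threads=65536" entry.

    # Illustrative sketch only: enumerate (sockets, cores, threads) triples
    # whose product equals the vCPU count, within the given limits.
    from collections import namedtuple

    Topology = namedtuple('Topology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        found = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, max_cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    found.append(Topology(sockets, cores, threads))
        return found

    # For a 1-vCPU flavor this yields exactly one topology, matching the
    # "Got 1 possible topologies" entry above:
    print(possible_topologies(1))  # [Topology(sockets=1, cores=1, threads=1)]
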
[ 1502.539630] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f545503-a491-4d47-be8b-b50ed7a07fef {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.690575] env[62510]: DEBUG oslo_concurrency.lockutils [None req-568d4908-b200-4728-8122-5c64ebc394d9 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.230s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1502.693054] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.835s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1502.694363] env[62510]: INFO nova.compute.claims [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1502.697649] env[62510]: DEBUG oslo_concurrency.lockutils [None req-167b471e-b5ac-4f9d-8210-1ebcb4c7d216 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Lock "8e3cefa1-fab9-469e-8a32-31b4a8ecf4be" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.706s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1502.714305] env[62510]: INFO nova.scheduler.client.report [None req-568d4908-b200-4728-8122-5c64ebc394d9 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Deleted allocations for instance 7490c825-dfd5-409c-9fd6-0e78643338fb [ 1502.909724] env[62510]: DEBUG oslo_vmware.api [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]521a4a89-f050-c63f-372d-d5c28e306496, 'name': SearchDatastore_Task, 'duration_secs': 0.020379} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1502.910159] env[62510]: DEBUG oslo_concurrency.lockutils [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1502.910491] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] b004fba7-13e0-40f0-827d-8d09b7717176/b004fba7-13e0-40f0-827d-8d09b7717176.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1502.910796] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8bdddc41-80a0-4e91-856c-9f0d2eb53d70 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.918506] env[62510]: DEBUG oslo_vmware.api [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Waiting for the task: (returnval){ [ 1502.918506] env[62510]: value = "task-1768493" [ 1502.918506] env[62510]: _type = "Task" [ 1502.918506] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.927265] env[62510]: DEBUG oslo_vmware.api [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': task-1768493, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.007754] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4cec0b5b-42e9-4ea6-ab8c-767c3b9a2494 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Acquiring lock "8e3cefa1-fab9-469e-8a32-31b4a8ecf4be" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1503.008195] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4cec0b5b-42e9-4ea6-ab8c-767c3b9a2494 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Lock "8e3cefa1-fab9-469e-8a32-31b4a8ecf4be" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1503.008513] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4cec0b5b-42e9-4ea6-ab8c-767c3b9a2494 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Acquiring lock "8e3cefa1-fab9-469e-8a32-31b4a8ecf4be-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1503.008817] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4cec0b5b-42e9-4ea6-ab8c-767c3b9a2494 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Lock "8e3cefa1-fab9-469e-8a32-31b4a8ecf4be-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1503.009125] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4cec0b5b-42e9-4ea6-ab8c-767c3b9a2494 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Lock "8e3cefa1-fab9-469e-8a32-31b4a8ecf4be-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1503.012795] env[62510]: INFO nova.compute.manager [None req-4cec0b5b-42e9-4ea6-ab8c-767c3b9a2494 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Terminating instance [ 1503.046397] env[62510]: DEBUG oslo_concurrency.lockutils [None req-fc5070c4-52fc-4c51-9438-de5c343b920f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1503.202724] env[62510]: DEBUG nova.compute.manager [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1503.223083] env[62510]: DEBUG oslo_concurrency.lockutils [None req-568d4908-b200-4728-8122-5c64ebc394d9 tempest-ServersAdmin275Test-1394601813 tempest-ServersAdmin275Test-1394601813-project-member] Lock "7490c825-dfd5-409c-9fd6-0e78643338fb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.304s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1503.430698] env[62510]: DEBUG oslo_vmware.api [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': task-1768493, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.517422] env[62510]: DEBUG nova.compute.manager [None req-4cec0b5b-42e9-4ea6-ab8c-767c3b9a2494 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1503.517814] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4cec0b5b-42e9-4ea6-ab8c-767c3b9a2494 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1503.518976] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-172a9a49-03ba-42ce-8450-9791b6178e77 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.528600] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cec0b5b-42e9-4ea6-ab8c-767c3b9a2494 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1503.528789] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5a0bee9b-bd54-4b89-94a7-a0c97decc23c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.536673] env[62510]: DEBUG oslo_vmware.api [None req-4cec0b5b-42e9-4ea6-ab8c-767c3b9a2494 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Waiting for the task: (returnval){ [ 1503.536673] env[62510]: value = "task-1768494" [ 1503.536673] env[62510]: _type = "Task" [ 1503.536673] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.545925] env[62510]: DEBUG oslo_vmware.api [None req-4cec0b5b-42e9-4ea6-ab8c-767c3b9a2494 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': task-1768494, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.736015] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1503.783450] env[62510]: DEBUG nova.network.neutron [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Successfully updated port: 7a3daaec-85e6-418b-a6c1-a74dcb3b41ac {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1503.935424] env[62510]: DEBUG oslo_vmware.api [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': task-1768493, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.709378} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.935706] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] b004fba7-13e0-40f0-827d-8d09b7717176/b004fba7-13e0-40f0-827d-8d09b7717176.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1503.935943] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1503.936217] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6b851af2-6d67-409f-b694-a36b0e4b5077 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.946277] env[62510]: DEBUG oslo_vmware.api [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Waiting for the task: (returnval){ [ 1503.946277] env[62510]: value = "task-1768495" [ 1503.946277] env[62510]: _type = "Task" [ 1503.946277] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.954508] env[62510]: DEBUG oslo_vmware.api [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': task-1768495, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.046934] env[62510]: DEBUG oslo_vmware.api [None req-4cec0b5b-42e9-4ea6-ab8c-767c3b9a2494 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': task-1768494, 'name': PowerOffVM_Task, 'duration_secs': 0.277165} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1504.046934] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cec0b5b-42e9-4ea6-ab8c-767c3b9a2494 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1504.047111] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4cec0b5b-42e9-4ea6-ab8c-767c3b9a2494 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1504.047600] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7714b61d-1a51-4815-a660-c2aeb624f146 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.247587] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ba5ad15-6923-45e4-a2d0-261b37c0001d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.255169] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef67bbf2-9071-42c2-bb2d-53fb8f345da4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.285634] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a33e5f4-3671-40f1-9128-9ae2f9c79eca {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.288634] env[62510]: DEBUG oslo_concurrency.lockutils [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "refresh_cache-0158d7af-d3bb-4d9c-a7c6-fbab943977e2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1504.288774] env[62510]: DEBUG oslo_concurrency.lockutils [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquired lock "refresh_cache-0158d7af-d3bb-4d9c-a7c6-fbab943977e2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1504.288927] env[62510]: DEBUG nova.network.neutron [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1504.297197] env[62510]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ba257f4-7ce9-4062-9316-9d77592f02a7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.316087] env[62510]: DEBUG nova.compute.provider_tree [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1504.318450] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4cec0b5b-42e9-4ea6-ab8c-767c3b9a2494 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1504.318800] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4cec0b5b-42e9-4ea6-ab8c-767c3b9a2494 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1504.318888] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-4cec0b5b-42e9-4ea6-ab8c-767c3b9a2494 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Deleting the datastore file [datastore1] 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1504.319484] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4dde6ff6-5700-4f03-b52e-72e67d187056 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.326968] env[62510]: DEBUG oslo_vmware.api [None req-4cec0b5b-42e9-4ea6-ab8c-767c3b9a2494 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Waiting for the task: (returnval){ [ 1504.326968] env[62510]: value = "task-1768497" [ 1504.326968] env[62510]: _type = "Task" [ 1504.326968] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1504.336374] env[62510]: DEBUG oslo_vmware.api [None req-4cec0b5b-42e9-4ea6-ab8c-767c3b9a2494 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': task-1768497, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.458025] env[62510]: DEBUG oslo_vmware.api [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': task-1768495, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093994} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1504.458025] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1504.458612] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1000ea7c-be5d-42b2-9b95-e9c192f83646 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.481941] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Reconfiguring VM instance instance-00000020 to attach disk [datastore1] b004fba7-13e0-40f0-827d-8d09b7717176/b004fba7-13e0-40f0-827d-8d09b7717176.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1504.482163] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5762547f-020f-42b6-a999-38bb248fcfe9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.503296] env[62510]: DEBUG oslo_vmware.api [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Waiting for the task: (returnval){ [ 1504.503296] env[62510]: value = "task-1768498" [ 1504.503296] env[62510]: _type = "Task" [ 1504.503296] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1504.512031] env[62510]: DEBUG oslo_vmware.api [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': task-1768498, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.558667] env[62510]: DEBUG nova.compute.manager [req-e00d1b08-2e37-4a76-ad80-6d3ae31b99db req-dddfdf48-b19b-414a-9141-8a8182bc1e14 service nova] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Received event network-vif-plugged-7a3daaec-85e6-418b-a6c1-a74dcb3b41ac {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1504.558777] env[62510]: DEBUG oslo_concurrency.lockutils [req-e00d1b08-2e37-4a76-ad80-6d3ae31b99db req-dddfdf48-b19b-414a-9141-8a8182bc1e14 service nova] Acquiring lock "0158d7af-d3bb-4d9c-a7c6-fbab943977e2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1504.559206] env[62510]: DEBUG oslo_concurrency.lockutils [req-e00d1b08-2e37-4a76-ad80-6d3ae31b99db req-dddfdf48-b19b-414a-9141-8a8182bc1e14 service nova] Lock "0158d7af-d3bb-4d9c-a7c6-fbab943977e2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1504.559206] env[62510]: DEBUG oslo_concurrency.lockutils [req-e00d1b08-2e37-4a76-ad80-6d3ae31b99db req-dddfdf48-b19b-414a-9141-8a8182bc1e14 service nova] Lock "0158d7af-d3bb-4d9c-a7c6-fbab943977e2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1504.559349] env[62510]: DEBUG nova.compute.manager [req-e00d1b08-2e37-4a76-ad80-6d3ae31b99db req-dddfdf48-b19b-414a-9141-8a8182bc1e14 service nova] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] No waiting events found dispatching network-vif-plugged-7a3daaec-85e6-418b-a6c1-a74dcb3b41ac {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1504.559500] env[62510]: WARNING nova.compute.manager [req-e00d1b08-2e37-4a76-ad80-6d3ae31b99db req-dddfdf48-b19b-414a-9141-8a8182bc1e14 service nova] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Received unexpected event network-vif-plugged-7a3daaec-85e6-418b-a6c1-a74dcb3b41ac for instance with vm_state building and task_state spawning. [ 1504.559658] env[62510]: DEBUG nova.compute.manager [req-e00d1b08-2e37-4a76-ad80-6d3ae31b99db req-dddfdf48-b19b-414a-9141-8a8182bc1e14 service nova] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Received event network-changed-7a3daaec-85e6-418b-a6c1-a74dcb3b41ac {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1504.559812] env[62510]: DEBUG nova.compute.manager [req-e00d1b08-2e37-4a76-ad80-6d3ae31b99db req-dddfdf48-b19b-414a-9141-8a8182bc1e14 service nova] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Refreshing instance network info cache due to event network-changed-7a3daaec-85e6-418b-a6c1-a74dcb3b41ac. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1504.559975] env[62510]: DEBUG oslo_concurrency.lockutils [req-e00d1b08-2e37-4a76-ad80-6d3ae31b99db req-dddfdf48-b19b-414a-9141-8a8182bc1e14 service nova] Acquiring lock "refresh_cache-0158d7af-d3bb-4d9c-a7c6-fbab943977e2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1504.822784] env[62510]: DEBUG nova.scheduler.client.report [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1504.837900] env[62510]: DEBUG oslo_vmware.api [None req-4cec0b5b-42e9-4ea6-ab8c-767c3b9a2494 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Task: {'id': task-1768497, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.289178} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1504.838860] env[62510]: DEBUG nova.network.neutron [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1504.841101] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-4cec0b5b-42e9-4ea6-ab8c-767c3b9a2494 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1504.841340] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4cec0b5b-42e9-4ea6-ab8c-767c3b9a2494 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1504.841584] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4cec0b5b-42e9-4ea6-ab8c-767c3b9a2494 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1504.841857] env[62510]: INFO nova.compute.manager [None req-4cec0b5b-42e9-4ea6-ab8c-767c3b9a2494 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Took 1.32 seconds to destroy the instance on the hypervisor. 
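The ReconfigVM_Task / DeleteDatastoreFile_Task entries above show oslo.vmware's usual invoke-then-poll pattern: a vCenter task method is invoked through the API session and the session polls it until the "completed successfully" line is emitted. A minimal sketch of that call sequence, with placeholder host, credentials and VM managed-object reference (none of these values are taken from this trace):

    from oslo_vmware import api

    # Placeholder endpoint and credentials, illustrative only.
    session = api.VMwareAPISession('vc.example.org', 'user', 'password',
                                   api_retry_count=10, task_poll_interval=0.5)

    vm_ref = None  # placeholder: a real VirtualMachine managed-object reference
    # Start a vCenter task and block until it finishes; polling produces the
    # "Task: {...} progress is N%" DEBUG lines seen throughout this log.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)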
[ 1504.842170] env[62510]: DEBUG oslo.service.loopingcall [None req-4cec0b5b-42e9-4ea6-ab8c-767c3b9a2494 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1504.843160] env[62510]: DEBUG nova.compute.manager [-] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1504.843438] env[62510]: DEBUG nova.network.neutron [-] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1505.016289] env[62510]: DEBUG oslo_vmware.api [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': task-1768498, 'name': ReconfigVM_Task, 'duration_secs': 0.500888} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.021017] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Reconfigured VM instance instance-00000020 to attach disk [datastore1] b004fba7-13e0-40f0-827d-8d09b7717176/b004fba7-13e0-40f0-827d-8d09b7717176.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1505.021017] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c934aa06-e7d7-4d46-b66f-c01eca7f8847 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.023381] env[62510]: DEBUG oslo_vmware.api [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Waiting for the task: (returnval){ [ 1505.023381] env[62510]: value = "task-1768499" [ 1505.023381] env[62510]: _type = "Task" [ 1505.023381] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.032879] env[62510]: DEBUG oslo_vmware.api [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': task-1768499, 'name': Rename_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.214110] env[62510]: DEBUG nova.network.neutron [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Updating instance_info_cache with network_info: [{"id": "7a3daaec-85e6-418b-a6c1-a74dcb3b41ac", "address": "fa:16:3e:b8:27:64", "network": {"id": "bf59f5d9-5154-4120-9edd-03529b552382", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2003015829-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e144c0bd2d124193a65ad53de8c43039", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a3daaec-85", "ovs_interfaceid": "7a3daaec-85e6-418b-a6c1-a74dcb3b41ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1505.333245] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.640s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1505.334890] env[62510]: DEBUG nova.compute.manager [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1505.338655] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.719s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1505.341405] env[62510]: INFO nova.compute.claims [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1505.539023] env[62510]: DEBUG oslo_vmware.api [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': task-1768499, 'name': Rename_Task, 'duration_secs': 0.185713} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.539023] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1505.539023] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fcf8954d-3f8c-4726-85e4-1418869734d9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.546111] env[62510]: DEBUG oslo_vmware.api [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Waiting for the task: (returnval){ [ 1505.546111] env[62510]: value = "task-1768500" [ 1505.546111] env[62510]: _type = "Task" [ 1505.546111] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.554110] env[62510]: DEBUG oslo_vmware.api [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': task-1768500, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.717543] env[62510]: DEBUG oslo_concurrency.lockutils [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Releasing lock "refresh_cache-0158d7af-d3bb-4d9c-a7c6-fbab943977e2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1505.718349] env[62510]: DEBUG nova.compute.manager [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Instance network_info: |[{"id": "7a3daaec-85e6-418b-a6c1-a74dcb3b41ac", "address": "fa:16:3e:b8:27:64", "network": {"id": "bf59f5d9-5154-4120-9edd-03529b552382", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2003015829-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e144c0bd2d124193a65ad53de8c43039", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a3daaec-85", "ovs_interfaceid": "7a3daaec-85e6-418b-a6c1-a74dcb3b41ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1505.718733] 
env[62510]: DEBUG oslo_concurrency.lockutils [req-e00d1b08-2e37-4a76-ad80-6d3ae31b99db req-dddfdf48-b19b-414a-9141-8a8182bc1e14 service nova] Acquired lock "refresh_cache-0158d7af-d3bb-4d9c-a7c6-fbab943977e2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1505.719067] env[62510]: DEBUG nova.network.neutron [req-e00d1b08-2e37-4a76-ad80-6d3ae31b99db req-dddfdf48-b19b-414a-9141-8a8182bc1e14 service nova] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Refreshing network info cache for port 7a3daaec-85e6-418b-a6c1-a74dcb3b41ac {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1505.721706] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b8:27:64', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '87bbf4e0-9064-4516-b7e7-44973f817205', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7a3daaec-85e6-418b-a6c1-a74dcb3b41ac', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1505.733698] env[62510]: DEBUG oslo.service.loopingcall [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1505.737840] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1505.738436] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-051699de-50d8-4951-b4fd-5eb1fc0ded35 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.760185] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1505.760185] env[62510]: value = "task-1768501" [ 1505.760185] env[62510]: _type = "Task" [ 1505.760185] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.769304] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768501, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.848128] env[62510]: DEBUG nova.compute.utils [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1505.855376] env[62510]: DEBUG nova.compute.manager [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1505.855645] env[62510]: DEBUG nova.network.neutron [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1505.889899] env[62510]: DEBUG nova.network.neutron [-] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1505.959123] env[62510]: DEBUG nova.policy [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '97a7f1ca55d549a3985e95b6bbc665f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '94a46473611d4b22be7c66c909d1b348', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1506.060361] env[62510]: DEBUG oslo_vmware.api [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': task-1768500, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.074946] env[62510]: DEBUG oslo_vmware.rw_handles [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5251cd02-6de8-3837-af39-48016477d7a0/disk-0.vmdk. {{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1506.076033] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41903de9-0890-4efe-9778-8fe382465c97 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.083040] env[62510]: DEBUG oslo_vmware.rw_handles [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5251cd02-6de8-3837-af39-48016477d7a0/disk-0.vmdk is in state: ready. {{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1506.083040] env[62510]: ERROR oslo_vmware.rw_handles [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5251cd02-6de8-3837-af39-48016477d7a0/disk-0.vmdk due to incomplete transfer. 
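The oslo.service.loopingcall entries above ("Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return", and earlier the _deallocate_network_with_retries wait) wrap a callable in a looping call and block until it signals completion. A rough sketch of that pattern using FixedIntervalLoopingCall and a hypothetical _attempt() callable; the actual Nova helpers may use a back-off variant instead:

    from oslo_service import loopingcall

    def _attempt():
        # Hypothetical body: perform one try of the operation and, once it
        # succeeds, stop the loop and hand the result back to start().wait().
        raise loopingcall.LoopingCallDone(retvalue=True)

    timer = loopingcall.FixedIntervalLoopingCall(_attempt)
    result = timer.start(interval=1.0).wait()  # blocks until LoopingCallDone is raised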
[ 1506.083343] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-fb9ed5af-31ac-401e-9360-d10246e85e5f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.091669] env[62510]: DEBUG oslo_vmware.rw_handles [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5251cd02-6de8-3837-af39-48016477d7a0/disk-0.vmdk. {{(pid=62510) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1506.091975] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Uploaded image 14b98911-83eb-411e-8277-cc3f01ab3067 to the Glance image server {{(pid=62510) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1506.094167] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Destroying the VM {{(pid=62510) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1506.094450] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e7de7bc9-f763-4613-b44e-b5f06a22e698 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.100879] env[62510]: DEBUG oslo_vmware.api [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Waiting for the task: (returnval){ [ 1506.100879] env[62510]: value = "task-1768502" [ 1506.100879] env[62510]: _type = "Task" [ 1506.100879] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.109650] env[62510]: DEBUG oslo_vmware.api [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768502, 'name': Destroy_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.204488] env[62510]: DEBUG nova.network.neutron [req-e00d1b08-2e37-4a76-ad80-6d3ae31b99db req-dddfdf48-b19b-414a-9141-8a8182bc1e14 service nova] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Updated VIF entry in instance network info cache for port 7a3daaec-85e6-418b-a6c1-a74dcb3b41ac. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1506.204910] env[62510]: DEBUG nova.network.neutron [req-e00d1b08-2e37-4a76-ad80-6d3ae31b99db req-dddfdf48-b19b-414a-9141-8a8182bc1e14 service nova] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Updating instance_info_cache with network_info: [{"id": "7a3daaec-85e6-418b-a6c1-a74dcb3b41ac", "address": "fa:16:3e:b8:27:64", "network": {"id": "bf59f5d9-5154-4120-9edd-03529b552382", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2003015829-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e144c0bd2d124193a65ad53de8c43039", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a3daaec-85", "ovs_interfaceid": "7a3daaec-85e6-418b-a6c1-a74dcb3b41ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1506.273186] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768501, 'name': CreateVM_Task, 'duration_secs': 0.433348} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.273400] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1506.274423] env[62510]: DEBUG oslo_concurrency.lockutils [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1506.274702] env[62510]: DEBUG oslo_concurrency.lockutils [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1506.275357] env[62510]: DEBUG oslo_concurrency.lockutils [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1506.275920] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd45b1cf-de82-4f50-b6c6-5298568c450f {{(pid=62510) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.283688] env[62510]: DEBUG oslo_vmware.api [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1506.283688] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52287ca0-3f56-e337-d327-a1d063354124" [ 1506.283688] env[62510]: _type = "Task" [ 1506.283688] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.297948] env[62510]: DEBUG oslo_vmware.api [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52287ca0-3f56-e337-d327-a1d063354124, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.354477] env[62510]: DEBUG nova.compute.manager [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1506.394961] env[62510]: INFO nova.compute.manager [-] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Took 1.55 seconds to deallocate network for instance. [ 1506.511105] env[62510]: DEBUG nova.network.neutron [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Successfully created port: c5e41d27-e1b7-40e9-9e95-de2a680aded2 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1506.557512] env[62510]: DEBUG oslo_vmware.api [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': task-1768500, 'name': PowerOnVM_Task, 'duration_secs': 0.620293} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.557823] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1506.557993] env[62510]: INFO nova.compute.manager [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Took 9.21 seconds to spawn the instance on the hypervisor. 
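The Acquiring/Acquired/Releasing lock entries around the image cache and the per-instance "refresh_cache-..." lock come from oslo.concurrency's named locks, which emit exactly these acquire/wait/release DEBUG lines. A minimal sketch of the two forms, reusing the lock names that appear in the entries above (the guarded bodies are placeholders):

    from oslo_concurrency import lockutils

    instance_uuid = '0158d7af-d3bb-4d9c-a7c6-fbab943977e2'

    # Context-manager form, as used around the per-instance network info cache.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # placeholder critical section

    # Decorator form, producing the same log lines for a coarser lock name.
    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass  # placeholder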
[ 1506.558212] env[62510]: DEBUG nova.compute.manager [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1506.560208] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-933c6f27-d240-4bc4-a632-4938c459fc0c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.613081] env[62510]: DEBUG oslo_vmware.api [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768502, 'name': Destroy_Task} progress is 33%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.709148] env[62510]: DEBUG oslo_concurrency.lockutils [req-e00d1b08-2e37-4a76-ad80-6d3ae31b99db req-dddfdf48-b19b-414a-9141-8a8182bc1e14 service nova] Releasing lock "refresh_cache-0158d7af-d3bb-4d9c-a7c6-fbab943977e2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1506.795375] env[62510]: DEBUG oslo_vmware.api [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52287ca0-3f56-e337-d327-a1d063354124, 'name': SearchDatastore_Task, 'duration_secs': 0.018433} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.795641] env[62510]: DEBUG oslo_concurrency.lockutils [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1506.795867] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1506.796142] env[62510]: DEBUG oslo_concurrency.lockutils [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1506.796243] env[62510]: DEBUG oslo_concurrency.lockutils [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1506.796460] env[62510]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1506.796890] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d1a73971-fee9-4e8a-8af9-e0b395a3c44f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.809880] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1506.810026] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1506.810786] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02b814e0-1ca9-4640-9329-0cc81fabf6c0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.821019] env[62510]: DEBUG oslo_vmware.api [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1506.821019] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]521dbd2b-f2b4-8d07-691b-519614bb1898" [ 1506.821019] env[62510]: _type = "Task" [ 1506.821019] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.827165] env[62510]: DEBUG oslo_vmware.api [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]521dbd2b-f2b4-8d07-691b-519614bb1898, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.907369] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4cec0b5b-42e9-4ea6-ab8c-767c3b9a2494 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1506.908885] env[62510]: DEBUG nova.compute.manager [req-8796303c-955d-4ea0-b464-d48b9a16958f req-69bbbbb8-800b-4767-8c9c-b254ef8d4b8c service nova] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Received event network-vif-deleted-16130581-d9c2-41e9-8c94-f66f9c4f357c {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1507.013910] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba4ccc3a-78b8-4cea-8acc-22950b5ffbfb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.022619] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fd66bf8-58cb-465a-bee1-29ada4ff64b8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.056484] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbbef6c7-212e-42b4-9571-2330a58bccd4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.063946] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e1f4222-cb76-4511-87b5-651c37152124 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.082774] env[62510]: DEBUG nova.compute.provider_tree [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1507.089198] env[62510]: INFO nova.compute.manager [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Took 40.14 seconds to build instance. [ 1507.112451] env[62510]: DEBUG oslo_vmware.api [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768502, 'name': Destroy_Task} progress is 33%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.330390] env[62510]: DEBUG oslo_vmware.api [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]521dbd2b-f2b4-8d07-691b-519614bb1898, 'name': SearchDatastore_Task, 'duration_secs': 0.070524} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1507.331653] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0528bc70-7399-4e34-b9be-27c528d54ed2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.337298] env[62510]: DEBUG oslo_vmware.api [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1507.337298] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]529b65df-e2d7-df15-c089-0d6a5064ee29" [ 1507.337298] env[62510]: _type = "Task" [ 1507.337298] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1507.345596] env[62510]: DEBUG oslo_vmware.api [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]529b65df-e2d7-df15-c089-0d6a5064ee29, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.375021] env[62510]: DEBUG nova.compute.manager [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1507.401115] env[62510]: DEBUG nova.virt.hardware [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1507.402152] env[62510]: DEBUG nova.virt.hardware [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1507.402152] env[62510]: DEBUG nova.virt.hardware [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1507.402152] env[62510]: DEBUG nova.virt.hardware [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] 
Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1507.402152] env[62510]: DEBUG nova.virt.hardware [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1507.402152] env[62510]: DEBUG nova.virt.hardware [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1507.402494] env[62510]: DEBUG nova.virt.hardware [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1507.402714] env[62510]: DEBUG nova.virt.hardware [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1507.402951] env[62510]: DEBUG nova.virt.hardware [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1507.404094] env[62510]: DEBUG nova.virt.hardware [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1507.404094] env[62510]: DEBUG nova.virt.hardware [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1507.404337] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dada5574-6f2e-46e2-a896-da7c6809ee04 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.412558] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18993e1e-50cf-4da9-9d01-7408abbe72ff {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.590454] env[62510]: DEBUG nova.scheduler.client.report [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 
'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1507.594786] env[62510]: DEBUG oslo_concurrency.lockutils [None req-52a918e3-326e-4fb5-b2fe-64044981b70d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Lock "b004fba7-13e0-40f0-827d-8d09b7717176" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 75.036s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1507.615337] env[62510]: DEBUG oslo_vmware.api [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768502, 'name': Destroy_Task, 'duration_secs': 1.218686} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1507.615597] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Destroyed the VM [ 1507.615947] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Deleting Snapshot of the VM instance {{(pid=62510) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1507.616986] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-defb40d5-aa47-4d6c-8947-931695c58aee {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.623729] env[62510]: DEBUG oslo_vmware.api [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Waiting for the task: (returnval){ [ 1507.623729] env[62510]: value = "task-1768503" [ 1507.623729] env[62510]: _type = "Task" [ 1507.623729] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1507.631471] env[62510]: DEBUG oslo_vmware.api [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768503, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.851278] env[62510]: DEBUG oslo_vmware.api [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]529b65df-e2d7-df15-c089-0d6a5064ee29, 'name': SearchDatastore_Task, 'duration_secs': 0.052263} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1507.851278] env[62510]: DEBUG oslo_concurrency.lockutils [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1507.851278] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 0158d7af-d3bb-4d9c-a7c6-fbab943977e2/0158d7af-d3bb-4d9c-a7c6-fbab943977e2.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1507.852649] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5344f723-e3ee-44be-a2d7-e356a7f289ae {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.861858] env[62510]: DEBUG oslo_vmware.api [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1507.861858] env[62510]: value = "task-1768504" [ 1507.861858] env[62510]: _type = "Task" [ 1507.861858] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1507.877196] env[62510]: DEBUG oslo_vmware.api [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768504, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.098106] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.759s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1508.098860] env[62510]: DEBUG nova.compute.manager [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1508.101949] env[62510]: DEBUG nova.compute.manager [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1508.105150] env[62510]: DEBUG oslo_concurrency.lockutils [None req-91bfa699-17df-4e55-8e2a-7936ad6bf4a9 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.501s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1508.105388] env[62510]: DEBUG nova.objects.instance [None req-91bfa699-17df-4e55-8e2a-7936ad6bf4a9 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Lazy-loading 'resources' on Instance uuid d42295c9-2b0e-471e-9a87-1d7367de9588 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1508.137043] env[62510]: DEBUG oslo_vmware.api [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768503, 'name': RemoveSnapshot_Task} progress is 78%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.376466] env[62510]: DEBUG oslo_vmware.api [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768504, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.490941} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.377256] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 0158d7af-d3bb-4d9c-a7c6-fbab943977e2/0158d7af-d3bb-4d9c-a7c6-fbab943977e2.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1508.377256] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1508.377364] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cc27a09c-b0b1-4b71-a977-c72a3347ceca {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.384699] env[62510]: DEBUG oslo_vmware.api [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1508.384699] env[62510]: value = "task-1768505" [ 1508.384699] env[62510]: _type = "Task" [ 1508.384699] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.392728] env[62510]: DEBUG oslo_vmware.api [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768505, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.412331] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Acquiring lock "e3850272-9dae-4164-8f0e-f5513af23f49" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1508.412615] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Lock "e3850272-9dae-4164-8f0e-f5513af23f49" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1508.533431] env[62510]: DEBUG nova.network.neutron [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Successfully updated port: c5e41d27-e1b7-40e9-9e95-de2a680aded2 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1508.606832] env[62510]: DEBUG nova.compute.utils [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1508.608245] env[62510]: DEBUG nova.compute.manager [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1508.608424] env[62510]: DEBUG nova.network.neutron [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1508.638642] env[62510]: DEBUG oslo_vmware.api [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768503, 'name': RemoveSnapshot_Task, 'duration_secs': 0.960662} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.638994] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Deleted Snapshot of the VM instance {{(pid=62510) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1508.639267] env[62510]: INFO nova.compute.manager [None req-2c86c01b-5b0c-45d4-a494-490e04190793 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Took 17.50 seconds to snapshot the instance on the hypervisor. [ 1508.647720] env[62510]: DEBUG oslo_concurrency.lockutils [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1508.705031] env[62510]: DEBUG nova.policy [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8cdbd43af76041c3a16e24bc5e8cb4a3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5f069093317e433cb38a868215dbf03d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1508.894756] env[62510]: DEBUG oslo_vmware.api [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768505, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080536} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.895185] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1508.896132] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-045f4d8d-45d8-4a4e-b355-e6ca646aaeec {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.921515] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Reconfiguring VM instance instance-00000021 to attach disk [datastore1] 0158d7af-d3bb-4d9c-a7c6-fbab943977e2/0158d7af-d3bb-4d9c-a7c6-fbab943977e2.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1508.924246] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-14c60b83-4f9f-4cd5-965c-1b051da8b766 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.944111] env[62510]: DEBUG oslo_vmware.api [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1508.944111] env[62510]: value = "task-1768506" [ 1508.944111] env[62510]: _type = "Task" [ 1508.944111] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.954408] env[62510]: DEBUG oslo_vmware.api [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768506, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.037018] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "refresh_cache-e9711202-67f3-4fe2-befb-f28722ddea33" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1509.037018] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquired lock "refresh_cache-e9711202-67f3-4fe2-befb-f28722ddea33" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1509.037018] env[62510]: DEBUG nova.network.neutron [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1509.128124] env[62510]: DEBUG nova.compute.manager [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1509.170876] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af11aa59-4ef2-41be-8d33-6fda7fc55524 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.181367] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a4ce282-002c-4c26-b81f-1db75b5487b4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.220427] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ac98bbf-0a88-4d9b-995e-ec209f78bdc6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.227322] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f5e4fb9-0a89-4826-931d-faacc379579c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.240970] env[62510]: DEBUG nova.compute.provider_tree [None req-91bfa699-17df-4e55-8e2a-7936ad6bf4a9 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1509.278699] env[62510]: DEBUG nova.network.neutron [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Successfully created port: 0d218053-8e39-4829-a3ad-5837c5fbb1e2 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1509.454418] env[62510]: DEBUG oslo_vmware.api [None 
req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768506, 'name': ReconfigVM_Task, 'duration_secs': 0.294319} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1509.454683] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Reconfigured VM instance instance-00000021 to attach disk [datastore1] 0158d7af-d3bb-4d9c-a7c6-fbab943977e2/0158d7af-d3bb-4d9c-a7c6-fbab943977e2.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1509.455303] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c1b692a6-6b94-409c-acd9-402144b510fd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.461245] env[62510]: DEBUG oslo_vmware.api [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1509.461245] env[62510]: value = "task-1768507" [ 1509.461245] env[62510]: _type = "Task" [ 1509.461245] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.469591] env[62510]: DEBUG oslo_vmware.api [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768507, 'name': Rename_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.564169] env[62510]: DEBUG nova.compute.manager [req-73793638-1571-474a-877e-75bfc485fc0f req-78b7a70a-8e40-4a59-8d5c-8a4061b45ac2 service nova] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Received event network-vif-plugged-c5e41d27-e1b7-40e9-9e95-de2a680aded2 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1509.564392] env[62510]: DEBUG oslo_concurrency.lockutils [req-73793638-1571-474a-877e-75bfc485fc0f req-78b7a70a-8e40-4a59-8d5c-8a4061b45ac2 service nova] Acquiring lock "e9711202-67f3-4fe2-befb-f28722ddea33-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1509.564596] env[62510]: DEBUG oslo_concurrency.lockutils [req-73793638-1571-474a-877e-75bfc485fc0f req-78b7a70a-8e40-4a59-8d5c-8a4061b45ac2 service nova] Lock "e9711202-67f3-4fe2-befb-f28722ddea33-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1509.565044] env[62510]: DEBUG oslo_concurrency.lockutils [req-73793638-1571-474a-877e-75bfc485fc0f req-78b7a70a-8e40-4a59-8d5c-8a4061b45ac2 service nova] Lock "e9711202-67f3-4fe2-befb-f28722ddea33-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1509.565044] env[62510]: DEBUG nova.compute.manager [req-73793638-1571-474a-877e-75bfc485fc0f req-78b7a70a-8e40-4a59-8d5c-8a4061b45ac2 service nova] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] No waiting events found dispatching network-vif-plugged-c5e41d27-e1b7-40e9-9e95-de2a680aded2 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1509.565199] env[62510]: WARNING nova.compute.manager [req-73793638-1571-474a-877e-75bfc485fc0f req-78b7a70a-8e40-4a59-8d5c-8a4061b45ac2 service nova] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Received unexpected event network-vif-plugged-c5e41d27-e1b7-40e9-9e95-de2a680aded2 for instance with vm_state building and task_state spawning. [ 1509.565359] env[62510]: DEBUG nova.compute.manager [req-73793638-1571-474a-877e-75bfc485fc0f req-78b7a70a-8e40-4a59-8d5c-8a4061b45ac2 service nova] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Received event network-changed-c5e41d27-e1b7-40e9-9e95-de2a680aded2 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1509.565508] env[62510]: DEBUG nova.compute.manager [req-73793638-1571-474a-877e-75bfc485fc0f req-78b7a70a-8e40-4a59-8d5c-8a4061b45ac2 service nova] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Refreshing instance network info cache due to event network-changed-c5e41d27-e1b7-40e9-9e95-de2a680aded2. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1509.565672] env[62510]: DEBUG oslo_concurrency.lockutils [req-73793638-1571-474a-877e-75bfc485fc0f req-78b7a70a-8e40-4a59-8d5c-8a4061b45ac2 service nova] Acquiring lock "refresh_cache-e9711202-67f3-4fe2-befb-f28722ddea33" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1509.614735] env[62510]: DEBUG nova.network.neutron [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1509.745560] env[62510]: DEBUG nova.scheduler.client.report [None req-91bfa699-17df-4e55-8e2a-7936ad6bf4a9 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1509.891423] env[62510]: DEBUG nova.network.neutron [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Updating instance_info_cache with network_info: [{"id": "c5e41d27-e1b7-40e9-9e95-de2a680aded2", "address": "fa:16:3e:8f:44:9a", "network": {"id": "22bd7136-e6e5-445f-8cd0-6cfe0341410c", "bridge": "br-int", "label": "tempest-ServersTestJSON-2034430291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "94a46473611d4b22be7c66c909d1b348", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5e41d27-e1", "ovs_interfaceid": "c5e41d27-e1b7-40e9-9e95-de2a680aded2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1509.971630] env[62510]: DEBUG oslo_vmware.api [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768507, 'name': Rename_Task, 'duration_secs': 0.166107} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1509.971906] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1509.972160] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0f154178-94d9-47b9-bfc0-493b4262fdfd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.978867] env[62510]: DEBUG oslo_vmware.api [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1509.978867] env[62510]: value = "task-1768508" [ 1509.978867] env[62510]: _type = "Task" [ 1509.978867] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.988457] env[62510]: DEBUG oslo_vmware.api [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768508, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.139888] env[62510]: DEBUG nova.compute.manager [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1510.160762] env[62510]: DEBUG nova.virt.hardware [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1510.161086] env[62510]: DEBUG nova.virt.hardware [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1510.161212] env[62510]: DEBUG nova.virt.hardware [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1510.161414] env[62510]: DEBUG nova.virt.hardware [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1510.161566] env[62510]: DEBUG nova.virt.hardware [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1510.161715] env[62510]: DEBUG nova.virt.hardware [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1510.161939] env[62510]: DEBUG nova.virt.hardware [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1510.162122] env[62510]: DEBUG nova.virt.hardware [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1510.162303] env[62510]: DEBUG nova.virt.hardware [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1510.162471] env[62510]: DEBUG nova.virt.hardware [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1510.162710] env[62510]: DEBUG nova.virt.hardware [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1510.163621] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-119e781c-420a-4a1d-aee9-6e7da4314816 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.171631] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b60efc2-7afb-4e13-978a-2daf5cf954a6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.251075] env[62510]: DEBUG oslo_concurrency.lockutils [None req-91bfa699-17df-4e55-8e2a-7936ad6bf4a9 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.146s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1510.253736] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.546s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1510.255739] env[62510]: INFO nova.compute.claims [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1510.272827] env[62510]: INFO nova.scheduler.client.report [None req-91bfa699-17df-4e55-8e2a-7936ad6bf4a9 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Deleted allocations for instance d42295c9-2b0e-471e-9a87-1d7367de9588 [ 1510.393743] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Releasing lock "refresh_cache-e9711202-67f3-4fe2-befb-f28722ddea33" {{(pid=62510) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1510.394098] env[62510]: DEBUG nova.compute.manager [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Instance network_info: |[{"id": "c5e41d27-e1b7-40e9-9e95-de2a680aded2", "address": "fa:16:3e:8f:44:9a", "network": {"id": "22bd7136-e6e5-445f-8cd0-6cfe0341410c", "bridge": "br-int", "label": "tempest-ServersTestJSON-2034430291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "94a46473611d4b22be7c66c909d1b348", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5e41d27-e1", "ovs_interfaceid": "c5e41d27-e1b7-40e9-9e95-de2a680aded2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1510.394404] env[62510]: DEBUG oslo_concurrency.lockutils [req-73793638-1571-474a-877e-75bfc485fc0f req-78b7a70a-8e40-4a59-8d5c-8a4061b45ac2 service nova] Acquired lock "refresh_cache-e9711202-67f3-4fe2-befb-f28722ddea33" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1510.394584] env[62510]: DEBUG nova.network.neutron [req-73793638-1571-474a-877e-75bfc485fc0f req-78b7a70a-8e40-4a59-8d5c-8a4061b45ac2 service nova] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Refreshing network info cache for port c5e41d27-e1b7-40e9-9e95-de2a680aded2 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1510.395874] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8f:44:9a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89ef02af-c508-432f-ae29-3a219701d584', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c5e41d27-e1b7-40e9-9e95-de2a680aded2', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1510.404661] env[62510]: DEBUG oslo.service.loopingcall [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1510.405270] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1510.405643] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4b0b64dc-d215-4829-a72e-f9636580f9b5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.427564] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1510.427564] env[62510]: value = "task-1768509" [ 1510.427564] env[62510]: _type = "Task" [ 1510.427564] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.437360] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768509, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.489978] env[62510]: DEBUG oslo_vmware.api [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768508, 'name': PowerOnVM_Task, 'duration_secs': 0.466294} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.490166] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1510.490371] env[62510]: INFO nova.compute.manager [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Took 8.00 seconds to spawn the instance on the hypervisor. [ 1510.490576] env[62510]: DEBUG nova.compute.manager [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1510.491526] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79630c5a-63b7-410d-a6b1-546730c6e40f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.780377] env[62510]: DEBUG oslo_concurrency.lockutils [None req-91bfa699-17df-4e55-8e2a-7936ad6bf4a9 tempest-ServerExternalEventsTest-1380506733 tempest-ServerExternalEventsTest-1380506733-project-member] Lock "d42295c9-2b0e-471e-9a87-1d7367de9588" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.662s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1510.938734] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768509, 'name': CreateVM_Task, 'duration_secs': 0.379795} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.938918] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1510.939552] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1510.939708] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1510.940042] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1510.940303] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00c979b8-e3e5-4525-841a-83a1fb84fbf6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.945252] env[62510]: DEBUG oslo_vmware.api [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1510.945252] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52ad50bb-8621-9623-cc65-e65307eca760" [ 1510.945252] env[62510]: _type = "Task" [ 1510.945252] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.953159] env[62510]: DEBUG oslo_vmware.api [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52ad50bb-8621-9623-cc65-e65307eca760, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.014743] env[62510]: INFO nova.compute.manager [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Took 43.02 seconds to build instance. 
[ 1511.053700] env[62510]: DEBUG nova.compute.manager [None req-203ebdd3-1191-4685-a4e8-e44e66fba96b tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1511.054678] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b63bbed-3fd5-46f5-99f9-b405fdcceb50 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.104426] env[62510]: DEBUG oslo_concurrency.lockutils [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Acquiring lock "bd21dd81-c0d9-4ff1-9183-0b4622dc5afb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1511.104716] env[62510]: DEBUG oslo_concurrency.lockutils [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Lock "bd21dd81-c0d9-4ff1-9183-0b4622dc5afb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1511.338028] env[62510]: DEBUG nova.network.neutron [req-73793638-1571-474a-877e-75bfc485fc0f req-78b7a70a-8e40-4a59-8d5c-8a4061b45ac2 service nova] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Updated VIF entry in instance network info cache for port c5e41d27-e1b7-40e9-9e95-de2a680aded2. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1511.338028] env[62510]: DEBUG nova.network.neutron [req-73793638-1571-474a-877e-75bfc485fc0f req-78b7a70a-8e40-4a59-8d5c-8a4061b45ac2 service nova] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Updating instance_info_cache with network_info: [{"id": "c5e41d27-e1b7-40e9-9e95-de2a680aded2", "address": "fa:16:3e:8f:44:9a", "network": {"id": "22bd7136-e6e5-445f-8cd0-6cfe0341410c", "bridge": "br-int", "label": "tempest-ServersTestJSON-2034430291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "94a46473611d4b22be7c66c909d1b348", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5e41d27-e1", "ovs_interfaceid": "c5e41d27-e1b7-40e9-9e95-de2a680aded2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1511.416305] env[62510]: DEBUG nova.network.neutron [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Successfully updated port: 0d218053-8e39-4829-a3ad-5837c5fbb1e2 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1511.455520] env[62510]: DEBUG oslo_vmware.api [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52ad50bb-8621-9623-cc65-e65307eca760, 'name': SearchDatastore_Task, 'duration_secs': 0.03828} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.458045] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1511.458332] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1511.458533] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1511.458680] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1511.458857] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1511.459593] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5ab61c5b-c853-4ebd-bc1c-471a7aaf2b35 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.473819] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1511.474099] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1511.474908] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35d5e039-2089-40e4-bd35-168301417879 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.482976] env[62510]: DEBUG oslo_vmware.api [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1511.482976] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52d764c5-1b34-6310-4c83-5293e42e6035" [ 1511.482976] env[62510]: _type = "Task" [ 1511.482976] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.490479] env[62510]: DEBUG oslo_vmware.api [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52d764c5-1b34-6310-4c83-5293e42e6035, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.517020] env[62510]: DEBUG oslo_concurrency.lockutils [None req-517f78a5-cf4d-4462-a085-deb4e58ed617 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "0158d7af-d3bb-4d9c-a7c6-fbab943977e2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.885s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1511.570359] env[62510]: INFO nova.compute.manager [None req-203ebdd3-1191-4685-a4e8-e44e66fba96b tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] instance snapshotting [ 1511.574020] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43c57005-f329-4fe8-aa52-90e9a436f281 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.598666] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9832deed-a49c-4a74-a248-6a592ab61a0b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.687119] env[62510]: DEBUG nova.compute.manager [req-5eb60b16-1bbd-4132-8664-e4e71bf6d951 req-299e77dd-c87d-4d7c-813f-10b83164ccf6 service nova] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Received event network-vif-plugged-0d218053-8e39-4829-a3ad-5837c5fbb1e2 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1511.687119] env[62510]: DEBUG oslo_concurrency.lockutils [req-5eb60b16-1bbd-4132-8664-e4e71bf6d951 req-299e77dd-c87d-4d7c-813f-10b83164ccf6 service nova] Acquiring lock "a09a34de-fe7c-414b-8a89-2e9271c72a5c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1511.687119] env[62510]: DEBUG oslo_concurrency.lockutils [req-5eb60b16-1bbd-4132-8664-e4e71bf6d951 req-299e77dd-c87d-4d7c-813f-10b83164ccf6 service nova] Lock 
"a09a34de-fe7c-414b-8a89-2e9271c72a5c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1511.687119] env[62510]: DEBUG oslo_concurrency.lockutils [req-5eb60b16-1bbd-4132-8664-e4e71bf6d951 req-299e77dd-c87d-4d7c-813f-10b83164ccf6 service nova] Lock "a09a34de-fe7c-414b-8a89-2e9271c72a5c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1511.687119] env[62510]: DEBUG nova.compute.manager [req-5eb60b16-1bbd-4132-8664-e4e71bf6d951 req-299e77dd-c87d-4d7c-813f-10b83164ccf6 service nova] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] No waiting events found dispatching network-vif-plugged-0d218053-8e39-4829-a3ad-5837c5fbb1e2 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1511.687119] env[62510]: WARNING nova.compute.manager [req-5eb60b16-1bbd-4132-8664-e4e71bf6d951 req-299e77dd-c87d-4d7c-813f-10b83164ccf6 service nova] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Received unexpected event network-vif-plugged-0d218053-8e39-4829-a3ad-5837c5fbb1e2 for instance with vm_state building and task_state spawning. [ 1511.687119] env[62510]: DEBUG nova.compute.manager [req-5eb60b16-1bbd-4132-8664-e4e71bf6d951 req-299e77dd-c87d-4d7c-813f-10b83164ccf6 service nova] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Received event network-changed-0d218053-8e39-4829-a3ad-5837c5fbb1e2 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1511.687119] env[62510]: DEBUG nova.compute.manager [req-5eb60b16-1bbd-4132-8664-e4e71bf6d951 req-299e77dd-c87d-4d7c-813f-10b83164ccf6 service nova] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Refreshing instance network info cache due to event network-changed-0d218053-8e39-4829-a3ad-5837c5fbb1e2. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1511.687119] env[62510]: DEBUG oslo_concurrency.lockutils [req-5eb60b16-1bbd-4132-8664-e4e71bf6d951 req-299e77dd-c87d-4d7c-813f-10b83164ccf6 service nova] Acquiring lock "refresh_cache-a09a34de-fe7c-414b-8a89-2e9271c72a5c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1511.687119] env[62510]: DEBUG oslo_concurrency.lockutils [req-5eb60b16-1bbd-4132-8664-e4e71bf6d951 req-299e77dd-c87d-4d7c-813f-10b83164ccf6 service nova] Acquired lock "refresh_cache-a09a34de-fe7c-414b-8a89-2e9271c72a5c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1511.687797] env[62510]: DEBUG nova.network.neutron [req-5eb60b16-1bbd-4132-8664-e4e71bf6d951 req-299e77dd-c87d-4d7c-813f-10b83164ccf6 service nova] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Refreshing network info cache for port 0d218053-8e39-4829-a3ad-5837c5fbb1e2 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1511.792113] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7e4077c-f2f6-478b-9315-21d5cb4005e3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.798638] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b82bcf2-791a-46ae-9dba-f9167a786c0d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.834225] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9abd3b7b-1a6c-46d1-b85d-649b408bc9ee {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.842401] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46deec78-8485-430c-917a-3af96aa72747 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.849234] env[62510]: DEBUG oslo_concurrency.lockutils [req-73793638-1571-474a-877e-75bfc485fc0f req-78b7a70a-8e40-4a59-8d5c-8a4061b45ac2 service nova] Releasing lock "refresh_cache-e9711202-67f3-4fe2-befb-f28722ddea33" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1511.859611] env[62510]: DEBUG nova.compute.provider_tree [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1511.923106] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Acquiring lock "refresh_cache-a09a34de-fe7c-414b-8a89-2e9271c72a5c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1511.994524] env[62510]: DEBUG oslo_vmware.api [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': 
session[528e9567-5f6b-75c7-e952-406003a8e250]52d764c5-1b34-6310-4c83-5293e42e6035, 'name': SearchDatastore_Task, 'duration_secs': 0.027585} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.994524] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0b38182-f29d-4c58-b189-a9882c56a7c8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.003075] env[62510]: DEBUG oslo_vmware.api [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1512.003075] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]522cd759-f0d7-0463-663d-dee1dd09553c" [ 1512.003075] env[62510]: _type = "Task" [ 1512.003075] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1512.010374] env[62510]: DEBUG oslo_vmware.api [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]522cd759-f0d7-0463-663d-dee1dd09553c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.020320] env[62510]: DEBUG nova.compute.manager [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1512.109842] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-203ebdd3-1191-4685-a4e8-e44e66fba96b tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Creating Snapshot of the VM instance {{(pid=62510) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1512.110825] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-3a4a8805-61f2-4590-83e7-aa8a7bdff879 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.118350] env[62510]: DEBUG oslo_vmware.api [None req-203ebdd3-1191-4685-a4e8-e44e66fba96b tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Waiting for the task: (returnval){ [ 1512.118350] env[62510]: value = "task-1768510" [ 1512.118350] env[62510]: _type = "Task" [ 1512.118350] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1512.127212] env[62510]: DEBUG oslo_vmware.api [None req-203ebdd3-1191-4685-a4e8-e44e66fba96b tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768510, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.238955] env[62510]: DEBUG nova.network.neutron [req-5eb60b16-1bbd-4132-8664-e4e71bf6d951 req-299e77dd-c87d-4d7c-813f-10b83164ccf6 service nova] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1512.362770] env[62510]: DEBUG nova.scheduler.client.report [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1512.367529] env[62510]: DEBUG nova.network.neutron [req-5eb60b16-1bbd-4132-8664-e4e71bf6d951 req-299e77dd-c87d-4d7c-813f-10b83164ccf6 service nova] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1512.519582] env[62510]: DEBUG oslo_vmware.api [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]522cd759-f0d7-0463-663d-dee1dd09553c, 'name': SearchDatastore_Task, 'duration_secs': 0.015342} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1512.519872] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1512.520164] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] e9711202-67f3-4fe2-befb-f28722ddea33/e9711202-67f3-4fe2-befb-f28722ddea33.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1512.520441] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-26c58613-6ffc-4fec-8ab6-1b122ee49791 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.527981] env[62510]: DEBUG oslo_vmware.api [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1512.527981] env[62510]: value = "task-1768511" [ 1512.527981] env[62510]: _type = "Task" [ 1512.527981] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1512.539461] env[62510]: DEBUG oslo_vmware.api [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768511, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.544951] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1512.628938] env[62510]: DEBUG oslo_vmware.api [None req-203ebdd3-1191-4685-a4e8-e44e66fba96b tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768510, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.869316] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.616s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1512.869861] env[62510]: DEBUG nova.compute.manager [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1512.873669] env[62510]: DEBUG oslo_concurrency.lockutils [req-5eb60b16-1bbd-4132-8664-e4e71bf6d951 req-299e77dd-c87d-4d7c-813f-10b83164ccf6 service nova] Releasing lock "refresh_cache-a09a34de-fe7c-414b-8a89-2e9271c72a5c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1512.876510] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 29.759s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1512.876510] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Acquired lock "refresh_cache-a09a34de-fe7c-414b-8a89-2e9271c72a5c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1512.876649] env[62510]: DEBUG nova.network.neutron [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1512.996021] env[62510]: INFO nova.compute.manager [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Rebuilding instance [ 1513.033027] env[62510]: DEBUG nova.compute.manager [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1513.033935] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c8a119d-2ed1-414c-bd94-980597c40f14 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.048979] env[62510]: DEBUG oslo_vmware.api [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768511, 'name': CopyVirtualDisk_Task} progress is 0%. 
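The Acquiring/Acquired/Releasing lines for locks such as "compute_resources" and "refresh_cache-<uuid>" are oslo.concurrency instrumentation around named locks; the waited/held durations in the log bracket exactly the critical section. A minimal sketch of both forms, with illustrative lock names:

    # Sketch of the lock pattern behind the Acquiring/Released entries above.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim(instance_uuid):
        # runs with the named lock held; the "held N.NNNs" figures in the
        # log are measured around this block
        return instance_uuid

    # Context-manager form, as used for the per-instance cache locks:
    with lockutils.lock('refresh_cache-00000000-0000-0000-0000-000000000000'):
        pass  # rebuild the instance network info cache here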
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.130363] env[62510]: DEBUG oslo_vmware.api [None req-203ebdd3-1191-4685-a4e8-e44e66fba96b tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768510, 'name': CreateSnapshot_Task, 'duration_secs': 0.562321} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1513.130642] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-203ebdd3-1191-4685-a4e8-e44e66fba96b tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Created Snapshot of the VM instance {{(pid=62510) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1513.131544] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0e7be34-222d-492b-88d9-f5f72b428d5f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.377166] env[62510]: DEBUG nova.compute.utils [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1513.379053] env[62510]: DEBUG nova.compute.manager [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1513.379249] env[62510]: DEBUG nova.network.neutron [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1513.543740] env[62510]: DEBUG oslo_vmware.api [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768511, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.651053] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-203ebdd3-1191-4685-a4e8-e44e66fba96b tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Creating linked-clone VM from snapshot {{(pid=62510) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1513.651527] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-e35990f7-77b8-4181-b0b1-5ee1932f5957 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.664661] env[62510]: DEBUG oslo_vmware.api [None req-203ebdd3-1191-4685-a4e8-e44e66fba96b tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Waiting for the task: (returnval){ [ 1513.664661] env[62510]: value = "task-1768512" [ 1513.664661] env[62510]: _type = "Task" [ 1513.664661] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1513.674020] env[62510]: DEBUG oslo_vmware.api [None req-203ebdd3-1191-4685-a4e8-e44e66fba96b tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768512, 'name': CloneVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.677194] env[62510]: DEBUG nova.network.neutron [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1513.758527] env[62510]: DEBUG nova.policy [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c5050179d59b45d7a3e63c7d090e9181', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1b2875fef23d486a900e5909a704c64b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1513.897342] env[62510]: DEBUG nova.compute.manager [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1513.936039] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
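The "Policy check for network:attach_external_network failed" entry above is oslo.policy rejecting a request whose credentials carry only the reader/member roles. A hedged sketch of the same kind of check; the 'role:admin' rule string is illustrative, not Nova's shipped default.

    # Sketch: evaluate a policy rule against request credentials with
    # oslo.policy. The check string below is illustrative only.
    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(policy.RuleDefault(
        'network:attach_external_network', 'role:admin'))

    creds = {'roles': ['reader', 'member'],
             'project_id': '1b2875fef23d486a900e5909a704c64b'}
    print(enforcer.enforce('network:attach_external_network', {}, creds))
    # -> False for a non-admin caller, matching the entry above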
{{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1513.936039] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance b7c2c768-573b-4c1c-ade7-45fb87b95d41 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1513.936039] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 8bbafd7f-cdd1-4246-a509-2f97a6f78497 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1513.936039] env[62510]: WARNING nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1513.936039] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 2d2ab209-8072-4e64-8170-50d96d71bc54 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1513.936039] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 34a464e2-d38e-4c24-a487-c62a4f484667 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1513.936039] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance b004fba7-13e0-40f0-827d-8d09b7717176 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1514.044625] env[62510]: DEBUG oslo_vmware.api [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768511, 'name': CopyVirtualDisk_Task} progress is 0%. 
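The resource-tracker entries above reconcile each instance against its allocations in Placement; the warning for 8e3cefa1-… flags an allocation whose instance this host no longer manages. A sketch of inspecting such an allocation out of band; the endpoint and token are placeholders, and the osc-placement CLI ("openstack resource provider allocation show <consumer-uuid>") reports the same data.

    # Sketch: read a consumer's allocations directly from the Placement API.
    import requests

    PLACEMENT_URL = 'http://controller/placement'   # placeholder endpoint
    TOKEN = '...'                                    # keystone token, elided
    consumer = '8e3cefa1-fab9-469e-8a32-31b4a8ecf4be'

    resp = requests.get(
        f'{PLACEMENT_URL}/allocations/{consumer}',
        headers={'X-Auth-Token': TOKEN,
                 'OpenStack-API-Version': 'placement 1.28'})
    resp.raise_for_status()
    for rp_uuid, alloc in resp.json()['allocations'].items():
        # e.g. {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1} as logged above
        print(rp_uuid, alloc['resources'])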
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.050319] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1514.050579] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2a1a2da2-7d19-49b1-8300-29d8060f9717 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.053870] env[62510]: DEBUG nova.network.neutron [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Updating instance_info_cache with network_info: [{"id": "0d218053-8e39-4829-a3ad-5837c5fbb1e2", "address": "fa:16:3e:34:48:c6", "network": {"id": "dc2f3113-dcd9-48b8-aa8e-dd974b160cdd", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-789276194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f069093317e433cb38a868215dbf03d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d218053-8e", "ovs_interfaceid": "0d218053-8e39-4829-a3ad-5837c5fbb1e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1514.059791] env[62510]: DEBUG oslo_vmware.api [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1514.059791] env[62510]: value = "task-1768513" [ 1514.059791] env[62510]: _type = "Task" [ 1514.059791] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.065268] env[62510]: DEBUG oslo_vmware.api [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768513, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.176631] env[62510]: DEBUG oslo_vmware.api [None req-203ebdd3-1191-4685-a4e8-e44e66fba96b tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768512, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.339613] env[62510]: DEBUG nova.network.neutron [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Successfully created port: 62729ef9-4eb8-410c-94fc-a7cd602e845d {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1514.441908] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance bd21dd81-c0d9-4ff1-9183-0b4622dc5afb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1514.547894] env[62510]: DEBUG oslo_vmware.api [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768511, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.561024] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Releasing lock "refresh_cache-a09a34de-fe7c-414b-8a89-2e9271c72a5c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1514.561024] env[62510]: DEBUG nova.compute.manager [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Instance network_info: |[{"id": "0d218053-8e39-4829-a3ad-5837c5fbb1e2", "address": "fa:16:3e:34:48:c6", "network": {"id": "dc2f3113-dcd9-48b8-aa8e-dd974b160cdd", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-789276194-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5f069093317e433cb38a868215dbf03d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d218053-8e", "ovs_interfaceid": "0d218053-8e39-4829-a3ad-5837c5fbb1e2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1514.561024] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] 
[instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:34:48:c6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '39a4aca0-934b-4a91-8779-6a4360c3f967', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0d218053-8e39-4829-a3ad-5837c5fbb1e2', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1514.567868] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Creating folder: Project (5f069093317e433cb38a868215dbf03d). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1514.571948] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e549b18e-913e-4d5b-9b71-79cd66e6c105 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.580278] env[62510]: DEBUG oslo_vmware.api [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768513, 'name': PowerOffVM_Task, 'duration_secs': 0.363174} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.580909] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1514.581835] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1514.582544] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01f28168-bb7c-4cca-a544-f338ce155c32 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.588700] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Created folder: Project (5f069093317e433cb38a868215dbf03d) in parent group-v367197. [ 1514.589135] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Creating folder: Instances. Parent ref: group-v367290. 
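The "Instance VIF info" entry above is the neutron network_info for port 0d218053-… reduced to the flat structure the vmwareapi driver hands to its VM builder. A small sketch of that mapping, assuming one network_info entry of the shape logged above (this is not the driver's own helper):

    # Sketch: flatten one neutron network_info entry (shape as logged above)
    # into the vif_info dict shown in the "Instance VIF info" entry.
    def vif_info_from_network_info(vif, vif_model='vmxnet3'):
        net = vif['network']
        details = vif.get('details', {})
        return {
            'network_name': net['bridge'],                  # e.g. 'br-int'
            'mac_address': vif['address'],
            'network_ref': {
                'type': 'OpaqueNetwork',
                'network-id': details['nsx-logical-switch-id'],
                'network-type': 'nsx.LogicalSwitch',
                'use-external-id': True,
            },
            'iface_id': vif['id'],
            'vif_model': vif_model,
        }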
{{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1514.592093] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-357eda4a-3ad3-43ae-9d8e-254794e78c44 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.594535] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1514.595414] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c033a81c-be90-4115-9439-998fe3bc6868 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.608201] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Created folder: Instances in parent group-v367290. [ 1514.608201] env[62510]: DEBUG oslo.service.loopingcall [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1514.608201] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1514.608201] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f19738e1-3dad-4990-9691-6729ac9e4e8d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.630543] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1514.630543] env[62510]: value = "task-1768517" [ 1514.630543] env[62510]: _type = "Task" [ 1514.630543] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.646248] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768517, 'name': CreateVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.673270] env[62510]: DEBUG oslo_vmware.api [None req-203ebdd3-1191-4685-a4e8-e44e66fba96b tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768512, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.689812] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1514.690133] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1514.690362] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Deleting the datastore file [datastore1] 0158d7af-d3bb-4d9c-a7c6-fbab943977e2 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1514.690648] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d283e2f2-cf06-4b6b-83e4-25929f57049d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.699486] env[62510]: DEBUG oslo_vmware.api [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1514.699486] env[62510]: value = "task-1768518" [ 1514.699486] env[62510]: _type = "Task" [ 1514.699486] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.707528] env[62510]: DEBUG oslo_vmware.api [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768518, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.914419] env[62510]: DEBUG nova.compute.manager [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Start spawning the instance on the hypervisor. 
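The rebuild path logged for instance 0158d7af-… runs power-off, UnregisterVM, then DeleteDatastoreFile_Task over the instance directory. A hedged sketch of that teardown sequence; session is an oslo.vmware VMwareAPISession, and the VM, datastore path and datacenter references are assumed to have been looked up already.

    # Sketch of the power-off / unregister / delete-files sequence above.
    def destroy_backing(session, vm_ref, ds_path, dc_ref):
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)

        # UnregisterVM is not a *_Task method; it returns immediately.
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

        file_mgr = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_mgr, name=ds_path, datacenter=dc_ref)
        session.wait_for_task(task)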
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1514.937624] env[62510]: DEBUG nova.virt.hardware [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1514.938017] env[62510]: DEBUG nova.virt.hardware [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1514.938236] env[62510]: DEBUG nova.virt.hardware [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1514.938434] env[62510]: DEBUG nova.virt.hardware [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1514.938582] env[62510]: DEBUG nova.virt.hardware [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1514.938727] env[62510]: DEBUG nova.virt.hardware [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1514.938938] env[62510]: DEBUG nova.virt.hardware [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1514.939114] env[62510]: DEBUG nova.virt.hardware [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1514.939283] env[62510]: DEBUG nova.virt.hardware [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1514.939449] env[62510]: DEBUG nova.virt.hardware [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1514.939620] env[62510]: DEBUG nova.virt.hardware [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1514.940500] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3121abf2-3618-4d0a-be0a-b3faabf39bf1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.945088] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 83fa0d32-18ee-401d-af0b-a0adb538e5f4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1514.945265] env[62510]: WARNING nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance a040671e-941d-4406-81af-f2f7a4b690e4 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1514.945390] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 612e95d6-28ef-4c9a-b5d9-fd83122bfa44 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1514.950341] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fda5311a-1604-4963-b6bc-846ba3808ae7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.046658] env[62510]: DEBUG oslo_vmware.api [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768511, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.128605} completed successfully. 
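The hardware.py entries above enumerate socket/core/thread splits of the flavor's single vCPU under the default 65536 limits, which leaves only 1:1:1. A standalone sketch of that enumeration (not Nova's own implementation):

    # Sketch: enumerate sockets*cores*threads factorisations of a vCPU count.
    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        topologies.append((sockets, cores, threads))
        return topologies

    print(possible_topologies(1))   # -> [(1, 1, 1)], matching the log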
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.046964] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] e9711202-67f3-4fe2-befb-f28722ddea33/e9711202-67f3-4fe2-befb-f28722ddea33.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1515.047199] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1515.047447] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-73291118-8fab-4de6-b816-20ffbfe6d387 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.054047] env[62510]: DEBUG oslo_vmware.api [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1515.054047] env[62510]: value = "task-1768519" [ 1515.054047] env[62510]: _type = "Task" [ 1515.054047] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.061543] env[62510]: DEBUG oslo_vmware.api [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768519, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.140164] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768517, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.173629] env[62510]: DEBUG oslo_vmware.api [None req-203ebdd3-1191-4685-a4e8-e44e66fba96b tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768512, 'name': CloneVM_Task, 'duration_secs': 1.262941} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.173629] env[62510]: INFO nova.virt.vmwareapi.vmops [None req-203ebdd3-1191-4685-a4e8-e44e66fba96b tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Created linked-clone VM from snapshot [ 1515.174161] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fed6031-771f-4b4a-993e-90bf72dda28e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.182460] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-203ebdd3-1191-4685-a4e8-e44e66fba96b tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Uploading image 8fef84d7-72c6-43e1-819e-80c97d1b7e1b {{(pid=62510) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1515.205414] env[62510]: DEBUG oslo_vmware.rw_handles [None req-203ebdd3-1191-4685-a4e8-e44e66fba96b tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1515.205414] env[62510]: value = "vm-367289" [ 1515.205414] env[62510]: _type = "VirtualMachine" [ 1515.205414] env[62510]: }. {{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1515.205669] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-146bcbef-811a-4fe9-8b08-d1ec69e5d7cc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.214163] env[62510]: DEBUG oslo_vmware.api [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768518, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.172925} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.214163] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1515.214163] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1515.214384] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1515.221378] env[62510]: DEBUG oslo_vmware.rw_handles [None req-203ebdd3-1191-4685-a4e8-e44e66fba96b tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Lease: (returnval){ [ 1515.221378] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52d82b0b-4f6d-d32d-7f71-5f7aef1fc2d2" [ 1515.221378] env[62510]: _type = "HttpNfcLease" [ 1515.221378] env[62510]: } obtained for exporting VM: (result){ [ 1515.221378] env[62510]: value = "vm-367289" [ 1515.221378] env[62510]: _type = "VirtualMachine" [ 1515.221378] env[62510]: }. {{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1515.221378] env[62510]: DEBUG oslo_vmware.api [None req-203ebdd3-1191-4685-a4e8-e44e66fba96b tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Waiting for the lease: (returnval){ [ 1515.221378] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52d82b0b-4f6d-d32d-7f71-5f7aef1fc2d2" [ 1515.221378] env[62510]: _type = "HttpNfcLease" [ 1515.221378] env[62510]: } to be ready. {{(pid=62510) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1515.225876] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1515.225876] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52d82b0b-4f6d-d32d-7f71-5f7aef1fc2d2" [ 1515.225876] env[62510]: _type = "HttpNfcLease" [ 1515.225876] env[62510]: } is initializing. {{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1515.450758] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance f9eb5110-28ec-474e-b80e-0bfcee51483d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1515.563965] env[62510]: DEBUG oslo_vmware.api [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768519, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.205277} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.564265] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1515.565079] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a10558e0-9df7-4113-b1bb-72463b10156f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.588334] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Reconfiguring VM instance instance-00000022 to attach disk [datastore1] e9711202-67f3-4fe2-befb-f28722ddea33/e9711202-67f3-4fe2-befb-f28722ddea33.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1515.588662] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ce99a88d-7c94-4954-b5f7-7f6d5b847b16 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.609428] env[62510]: DEBUG oslo_vmware.api [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1515.609428] env[62510]: value = "task-1768521" [ 1515.609428] env[62510]: _type = "Task" [ 1515.609428] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.617416] env[62510]: DEBUG oslo_vmware.api [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768521, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.641111] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768517, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.731328] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1515.731328] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52d82b0b-4f6d-d32d-7f71-5f7aef1fc2d2" [ 1515.731328] env[62510]: _type = "HttpNfcLease" [ 1515.731328] env[62510]: } is ready. 
{{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1515.732178] env[62510]: DEBUG oslo_vmware.rw_handles [None req-203ebdd3-1191-4685-a4e8-e44e66fba96b tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1515.732178] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52d82b0b-4f6d-d32d-7f71-5f7aef1fc2d2" [ 1515.732178] env[62510]: _type = "HttpNfcLease" [ 1515.732178] env[62510]: }. {{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1515.732934] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-863e6791-4b66-4b6c-8604-9de91a11bbbb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.741225] env[62510]: DEBUG oslo_vmware.rw_handles [None req-203ebdd3-1191-4685-a4e8-e44e66fba96b tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521c2824-d5a2-cec8-2445-0bb9834f25e0/disk-0.vmdk from lease info. {{(pid=62510) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1515.741422] env[62510]: DEBUG oslo_vmware.rw_handles [None req-203ebdd3-1191-4685-a4e8-e44e66fba96b tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521c2824-d5a2-cec8-2445-0bb9834f25e0/disk-0.vmdk for reading. {{(pid=62510) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1515.830596] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-cee8b8af-2e7f-47c7-a259-74d7c055f2a8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.953319] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance e3850272-9dae-4164-8f0e-f5513af23f49 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1515.953910] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance a09a34de-fe7c-414b-8a89-2e9271c72a5c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
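The image-upload path above obtains an HttpNfcLease with ExportVm, waits for it to become ready, then reads the disk-0.vmdk URL out of the lease info before streaming the disk to Glance. A sketch of the lease handling; session is an oslo.vmware VMwareAPISession and vm_ref is the linked-clone VM's managed object reference.

    # Sketch: export lease handling as logged above (ExportVm -> lease ready
    # -> VMDK URL). Streaming the disk to Glance is out of scope here.
    from oslo_vmware import vim_util

    def vmdk_export_url(session, vm_ref):
        lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)
        session.wait_for_lease_ready(lease)
        lease_info = session.invoke_api(vim_util, 'get_object_property',
                                        session.vim, lease, 'info')
        for device_url in lease_info.deviceUrl:
            if device_url.disk:           # the exported virtual disk
                return device_url.url     # .../nfc/<lease-id>/disk-0.vmdk
        return None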
{{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1516.053928] env[62510]: DEBUG nova.compute.manager [req-a65d40ea-4f16-4f6a-b85d-d6cade15b5c4 req-79286ae3-3629-4112-a61b-c001bc9d8c95 service nova] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Received event network-vif-plugged-62729ef9-4eb8-410c-94fc-a7cd602e845d {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1516.054150] env[62510]: DEBUG oslo_concurrency.lockutils [req-a65d40ea-4f16-4f6a-b85d-d6cade15b5c4 req-79286ae3-3629-4112-a61b-c001bc9d8c95 service nova] Acquiring lock "2d2ab209-8072-4e64-8170-50d96d71bc54-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1516.054307] env[62510]: DEBUG oslo_concurrency.lockutils [req-a65d40ea-4f16-4f6a-b85d-d6cade15b5c4 req-79286ae3-3629-4112-a61b-c001bc9d8c95 service nova] Lock "2d2ab209-8072-4e64-8170-50d96d71bc54-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1516.054521] env[62510]: DEBUG oslo_concurrency.lockutils [req-a65d40ea-4f16-4f6a-b85d-d6cade15b5c4 req-79286ae3-3629-4112-a61b-c001bc9d8c95 service nova] Lock "2d2ab209-8072-4e64-8170-50d96d71bc54-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1516.054657] env[62510]: DEBUG nova.compute.manager [req-a65d40ea-4f16-4f6a-b85d-d6cade15b5c4 req-79286ae3-3629-4112-a61b-c001bc9d8c95 service nova] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] No waiting events found dispatching network-vif-plugged-62729ef9-4eb8-410c-94fc-a7cd602e845d {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1516.054893] env[62510]: WARNING nova.compute.manager [req-a65d40ea-4f16-4f6a-b85d-d6cade15b5c4 req-79286ae3-3629-4112-a61b-c001bc9d8c95 service nova] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Received unexpected event network-vif-plugged-62729ef9-4eb8-410c-94fc-a7cd602e845d for instance with vm_state building and task_state spawning. [ 1516.119956] env[62510]: DEBUG oslo_vmware.api [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768521, 'name': ReconfigVM_Task, 'duration_secs': 0.443753} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.120444] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Reconfigured VM instance instance-00000022 to attach disk [datastore1] e9711202-67f3-4fe2-befb-f28722ddea33/e9711202-67f3-4fe2-befb-f28722ddea33.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1516.121187] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-42209210-c8c4-41c5-9724-d41bf2914ffd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.124567] env[62510]: DEBUG nova.network.neutron [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Successfully updated port: 62729ef9-4eb8-410c-94fc-a7cd602e845d {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1516.128212] env[62510]: DEBUG oslo_vmware.api [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1516.128212] env[62510]: value = "task-1768522" [ 1516.128212] env[62510]: _type = "Task" [ 1516.128212] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.139334] env[62510]: DEBUG oslo_vmware.api [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768522, 'name': Rename_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.145061] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768517, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.271424] env[62510]: DEBUG nova.virt.hardware [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1516.271904] env[62510]: DEBUG nova.virt.hardware [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1516.272183] env[62510]: DEBUG nova.virt.hardware [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1516.272470] env[62510]: DEBUG nova.virt.hardware [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1516.272721] env[62510]: DEBUG nova.virt.hardware [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1516.273065] env[62510]: DEBUG nova.virt.hardware [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1516.274223] env[62510]: DEBUG nova.virt.hardware [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1516.275955] env[62510]: DEBUG nova.virt.hardware [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1516.275955] 
env[62510]: DEBUG nova.virt.hardware [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1516.275955] env[62510]: DEBUG nova.virt.hardware [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1516.275955] env[62510]: DEBUG nova.virt.hardware [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1516.276760] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4214bd8-0dd0-481d-8dcc-d1f3304d7905 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.285095] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c71a5a8c-12bd-49aa-a3ca-f269f77c8976 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.302345] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b8:27:64', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '87bbf4e0-9064-4516-b7e7-44973f817205', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7a3daaec-85e6-418b-a6c1-a74dcb3b41ac', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1516.310292] env[62510]: DEBUG oslo.service.loopingcall [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1516.310629] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1516.310960] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a628e3ef-0328-4792-8db6-0e22625bef88 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.335407] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1516.335407] env[62510]: value = "task-1768523" [ 1516.335407] env[62510]: _type = "Task" [ 1516.335407] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.345099] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768523, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.456947] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 2c5c38c1-511f-4aae-969a-eb6de128fae7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1516.627306] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Acquiring lock "refresh_cache-2d2ab209-8072-4e64-8170-50d96d71bc54" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1516.627497] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Acquired lock "refresh_cache-2d2ab209-8072-4e64-8170-50d96d71bc54" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1516.627643] env[62510]: DEBUG nova.network.neutron [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1516.641590] env[62510]: DEBUG oslo_vmware.api [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768522, 'name': Rename_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.645516] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768517, 'name': CreateVM_Task, 'duration_secs': 1.609945} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.646089] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1516.646381] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1516.646543] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1516.646861] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1516.647204] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3bc918eb-621f-4aff-90d4-c37d7c116517 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.651847] env[62510]: DEBUG oslo_vmware.api [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Waiting for the task: (returnval){ [ 1516.651847] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52c2c094-8687-742d-aefd-5e0fa899c993" [ 1516.651847] env[62510]: _type = "Task" [ 1516.651847] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.660253] env[62510]: DEBUG oslo_vmware.api [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52c2c094-8687-742d-aefd-5e0fa899c993, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.845623] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768523, 'name': CreateVM_Task, 'duration_secs': 0.325184} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.845921] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1516.846565] env[62510]: DEBUG oslo_concurrency.lockutils [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1516.960158] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 2c5d137d-4fd5-4035-a04f-bdb76e90edd7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1516.960461] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 4e735bb6-f167-4c2b-b44e-d2dd3040603d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1516.960530] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 8ffa27e9-6a3b-48d1-aed4-c808089788d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1516.960760] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance e9711202-67f3-4fe2-befb-f28722ddea33 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1516.961348] env[62510]: WARNING nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 26b283b0-98b4-4a15-abe0-fbf97e1f49eb is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1516.961800] env[62510]: WARNING nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 9a1a0428-8ccd-4614-8853-ef3eeec23d55 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1516.961985] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance cfe53f9c-d78b-4af7-b991-f3549c03f22d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1517.140950] env[62510]: DEBUG oslo_vmware.api [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768522, 'name': Rename_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.162302] env[62510]: DEBUG oslo_vmware.api [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52c2c094-8687-742d-aefd-5e0fa899c993, 'name': SearchDatastore_Task, 'duration_secs': 0.010791} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.162904] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1517.163280] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1517.163816] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1517.164209] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1517.164614] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1517.165135] env[62510]: DEBUG oslo_concurrency.lockutils [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1517.165467] env[62510]: DEBUG oslo_concurrency.lockutils [None 
req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1517.165788] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-202d236b-fdc7-41bd-8b97-9bc5e4211e20 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.167845] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-571b5f41-0095-4347-a56f-7e624616bf2a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.172965] env[62510]: DEBUG oslo_vmware.api [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1517.172965] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]522fcb70-9948-5806-90e3-8b0867fee112" [ 1517.172965] env[62510]: _type = "Task" [ 1517.172965] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.173932] env[62510]: DEBUG nova.network.neutron [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1517.180299] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1517.180477] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1517.181264] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-952c74ae-4c40-4006-acfa-c6a06fa010a2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.188211] env[62510]: DEBUG oslo_vmware.api [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]522fcb70-9948-5806-90e3-8b0867fee112, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.189479] env[62510]: DEBUG oslo_vmware.api [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Waiting for the task: (returnval){ [ 1517.189479] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]529145aa-c214-02d0-5417-edc67eb2b781" [ 1517.189479] env[62510]: _type = "Task" [ 1517.189479] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.197344] env[62510]: DEBUG oslo_vmware.api [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]529145aa-c214-02d0-5417-edc67eb2b781, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.327648] env[62510]: DEBUG nova.network.neutron [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Updating instance_info_cache with network_info: [{"id": "62729ef9-4eb8-410c-94fc-a7cd602e845d", "address": "fa:16:3e:6a:fc:1c", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.252", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62729ef9-4e", "ovs_interfaceid": "62729ef9-4eb8-410c-94fc-a7cd602e845d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1517.465157] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 9d5d29ea-be92-4881-9fc8-fea3f2f442d0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1517.465346] env[62510]: WARNING nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 0a940fd0-73cc-403d-9afc-a989c67dfdef is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
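The resource-tracker records above reduce to a three-way decision for each allocation that Placement records against this compute node: keep it, skip healing it, or warn about it. The following is a minimal sketch of that triage only, with illustrative data; it is not Nova's _remove_deleted_instances_allocations, just the decision those log lines describe (the instance UUID is copied from the warning above).

    from dataclasses import dataclass

    @dataclass
    class Allocation:
        instance_uuid: str
        resources: dict  # e.g. {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}

    def triage_allocation(alloc, tracked_instances, scheduled_instances):
        """Return 'keep', 'skip-heal', or 'warn' for one allocation."""
        if alloc.instance_uuid in tracked_instances:
            # "actively managed on this compute host and has allocations in placement"
            return 'keep'
        if alloc.instance_uuid in scheduled_instances:
            # "scheduled to this compute host ... but the instance has yet to start"
            return 'skip-heal'
        # "not being actively managed by this compute host but has allocations
        # referencing this compute host ... we do not know what to do"
        return 'warn'

    alloc = Allocation('0a940fd0-73cc-403d-9afc-a989c67dfdef',
                       {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1})
    print(triage_allocation(alloc, tracked_instances=set(), scheduled_instances=set()))
    # -> warn

Allocations for instances the host actively manages are kept, allocations for freshly scheduled instances are skipped, and anything else is only warned about, since the tracker cannot tell from its own state whether the allocation is stale.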
[ 1517.465476] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 12768001-6ed0-47be-8f20-c59ee82b842a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1517.639027] env[62510]: DEBUG oslo_vmware.api [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768522, 'name': Rename_Task, 'duration_secs': 1.163335} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.639521] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1517.639918] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1d8635c1-d063-4438-822f-35a0f103ae0d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.647800] env[62510]: DEBUG oslo_vmware.api [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1517.647800] env[62510]: value = "task-1768524" [ 1517.647800] env[62510]: _type = "Task" [ 1517.647800] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.656397] env[62510]: DEBUG oslo_vmware.api [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768524, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.686051] env[62510]: DEBUG oslo_vmware.api [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]522fcb70-9948-5806-90e3-8b0867fee112, 'name': SearchDatastore_Task, 'duration_secs': 0.02242} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.686389] env[62510]: DEBUG oslo_concurrency.lockutils [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1517.686639] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1517.686904] env[62510]: DEBUG oslo_concurrency.lockutils [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1517.699857] env[62510]: DEBUG oslo_vmware.api [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]529145aa-c214-02d0-5417-edc67eb2b781, 'name': SearchDatastore_Task, 'duration_secs': 0.02398} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.700743] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dec54d38-2dca-434d-8abf-4835d98c413d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.706419] env[62510]: DEBUG oslo_vmware.api [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Waiting for the task: (returnval){ [ 1517.706419] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52f469c2-88ac-bcd8-b857-c8194955cbb7" [ 1517.706419] env[62510]: _type = "Task" [ 1517.706419] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.715609] env[62510]: DEBUG oslo_vmware.api [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52f469c2-88ac-bcd8-b857-c8194955cbb7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.830772] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Releasing lock "refresh_cache-2d2ab209-8072-4e64-8170-50d96d71bc54" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1517.831013] env[62510]: DEBUG nova.compute.manager [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Instance network_info: |[{"id": "62729ef9-4eb8-410c-94fc-a7cd602e845d", "address": "fa:16:3e:6a:fc:1c", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.252", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62729ef9-4e", "ovs_interfaceid": "62729ef9-4eb8-410c-94fc-a7cd602e845d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1517.831524] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6a:fc:1c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '28d04eee-6dbb-491a-a999-b659c799679d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '62729ef9-4eb8-410c-94fc-a7cd602e845d', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1517.841755] env[62510]: DEBUG oslo.service.loopingcall [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1517.842083] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1517.842377] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a11dc11b-475f-4048-95d5-14c7a57ae0f9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.862564] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1517.862564] env[62510]: value = "task-1768525" [ 1517.862564] env[62510]: _type = "Task" [ 1517.862564] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.871025] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768525, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.969168] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance fae7e580-ab09-4fda-9cbe-0e066ddcb85c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1517.969658] env[62510]: WARNING nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 75e06a24-b96c-4a42-bc2d-b0b960e3301a is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1517.969658] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance c7d875ee-2b9c-48e4-9bf9-f7602e75ec62 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1517.969658] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 0029d975-bd48-4558-9f41-a0cf91336393 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1518.159015] env[62510]: DEBUG oslo_vmware.api [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768524, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.168122] env[62510]: DEBUG nova.compute.manager [req-803af7ee-0e95-4fb3-afb5-18d82441334c req-7bedba22-2800-4021-9927-064ca54fc8d4 service nova] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Received event network-changed-62729ef9-4eb8-410c-94fc-a7cd602e845d {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1518.168539] env[62510]: DEBUG nova.compute.manager [req-803af7ee-0e95-4fb3-afb5-18d82441334c req-7bedba22-2800-4021-9927-064ca54fc8d4 service nova] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Refreshing instance network info cache due to event network-changed-62729ef9-4eb8-410c-94fc-a7cd602e845d. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1518.168742] env[62510]: DEBUG oslo_concurrency.lockutils [req-803af7ee-0e95-4fb3-afb5-18d82441334c req-7bedba22-2800-4021-9927-064ca54fc8d4 service nova] Acquiring lock "refresh_cache-2d2ab209-8072-4e64-8170-50d96d71bc54" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1518.168894] env[62510]: DEBUG oslo_concurrency.lockutils [req-803af7ee-0e95-4fb3-afb5-18d82441334c req-7bedba22-2800-4021-9927-064ca54fc8d4 service nova] Acquired lock "refresh_cache-2d2ab209-8072-4e64-8170-50d96d71bc54" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1518.169094] env[62510]: DEBUG nova.network.neutron [req-803af7ee-0e95-4fb3-afb5-18d82441334c req-7bedba22-2800-4021-9927-064ca54fc8d4 service nova] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Refreshing network info cache for port 62729ef9-4eb8-410c-94fc-a7cd602e845d {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1518.216383] env[62510]: DEBUG oslo_vmware.api [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52f469c2-88ac-bcd8-b857-c8194955cbb7, 'name': SearchDatastore_Task, 'duration_secs': 0.012367} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.216694] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1518.216959] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] a09a34de-fe7c-414b-8a89-2e9271c72a5c/a09a34de-fe7c-414b-8a89-2e9271c72a5c.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1518.217287] env[62510]: DEBUG oslo_concurrency.lockutils [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1518.217497] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1518.217728] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-64a21068-16bf-40ca-b144-ec65718a9e98 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.220049] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-59ce1499-5229-4b41-a6af-e9477b81258b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.227126] env[62510]: DEBUG oslo_vmware.api [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Waiting for the task: (returnval){ [ 1518.227126] env[62510]: value = "task-1768526" [ 1518.227126] env[62510]: _type = "Task" [ 1518.227126] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.231892] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1518.232111] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1518.233404] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a3e29c5-cfb7-4d3a-b5da-2a28e5182e39 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.239446] env[62510]: DEBUG oslo_vmware.api [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Task: {'id': task-1768526, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.242804] env[62510]: DEBUG oslo_vmware.api [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1518.242804] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5299644a-891c-0e30-d216-cba033508e6b" [ 1518.242804] env[62510]: _type = "Task" [ 1518.242804] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.251405] env[62510]: DEBUG oslo_vmware.api [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5299644a-891c-0e30-d216-cba033508e6b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.372994] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768525, 'name': CreateVM_Task, 'duration_secs': 0.374657} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.373188] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1518.373856] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1518.374022] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1518.374363] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1518.374611] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-617d6000-9d69-40aa-b5b1-eafbd9bed2b0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.379950] env[62510]: DEBUG oslo_vmware.api [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Waiting for the task: (returnval){ [ 1518.379950] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5259dfbe-cace-a512-3c28-c846579c7965" [ 1518.379950] env[62510]: _type = "Task" [ 1518.379950] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.388016] env[62510]: DEBUG oslo_vmware.api [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5259dfbe-cace-a512-3c28-c846579c7965, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.473296] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 87d1d75e-41c4-42e6-bf58-deabb71400e1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1518.660277] env[62510]: DEBUG oslo_vmware.api [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768524, 'name': PowerOnVM_Task, 'duration_secs': 0.677454} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.660575] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1518.660783] env[62510]: INFO nova.compute.manager [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Took 11.29 seconds to spawn the instance on the hypervisor. [ 1518.660961] env[62510]: DEBUG nova.compute.manager [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1518.661801] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fbf8077-3158-4bac-abf3-ceea13472b7b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.737145] env[62510]: DEBUG oslo_vmware.api [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Task: {'id': task-1768526, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.456971} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.737435] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] a09a34de-fe7c-414b-8a89-2e9271c72a5c/a09a34de-fe7c-414b-8a89-2e9271c72a5c.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1518.737634] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1518.737886] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ae29f8e2-221e-4fb1-8ae9-c046bd816224 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.744752] env[62510]: DEBUG oslo_vmware.api [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Waiting for the task: (returnval){ [ 1518.744752] env[62510]: value = "task-1768527" [ 1518.744752] env[62510]: _type = "Task" [ 1518.744752] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.756809] env[62510]: DEBUG oslo_vmware.api [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5299644a-891c-0e30-d216-cba033508e6b, 'name': SearchDatastore_Task, 'duration_secs': 0.009908} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.760332] env[62510]: DEBUG oslo_vmware.api [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Task: {'id': task-1768527, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.760632] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f852349-c0b3-4051-bd9b-bb5274f490ec {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.765905] env[62510]: DEBUG oslo_vmware.api [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1518.765905] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]528358ba-9ef8-98c3-565d-65a59c5db107" [ 1518.765905] env[62510]: _type = "Task" [ 1518.765905] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.780809] env[62510]: DEBUG oslo_vmware.api [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]528358ba-9ef8-98c3-565d-65a59c5db107, 'name': SearchDatastore_Task, 'duration_secs': 0.010216} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.780809] env[62510]: DEBUG oslo_concurrency.lockutils [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1518.781052] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 0158d7af-d3bb-4d9c-a7c6-fbab943977e2/0158d7af-d3bb-4d9c-a7c6-fbab943977e2.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1518.782225] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0988c299-8920-400c-9043-1cfcdab65879 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.791163] env[62510]: DEBUG oslo_vmware.api [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1518.791163] env[62510]: value = "task-1768528" [ 1518.791163] env[62510]: _type = "Task" [ 1518.791163] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.799422] env[62510]: DEBUG oslo_vmware.api [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768528, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.894515] env[62510]: DEBUG oslo_vmware.api [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5259dfbe-cace-a512-3c28-c846579c7965, 'name': SearchDatastore_Task, 'duration_secs': 0.066407} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.894966] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1518.895330] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1518.895690] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1518.895930] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1518.896243] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1518.896614] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-24a0c8a0-2da4-4f71-bb7d-0ffc92e47698 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.906547] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1518.907405] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1518.913335] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43ce1c35-dbc3-44d1-b34a-a2cf1e9532b4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.919909] env[62510]: DEBUG oslo_vmware.api [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Waiting for the task: (returnval){ [ 1518.919909] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52ab0b8a-f661-3784-929f-4388c4d2f9ac" [ 1518.919909] env[62510]: _type = "Task" [ 1518.919909] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.933705] env[62510]: DEBUG oslo_vmware.api [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52ab0b8a-f661-3784-929f-4388c4d2f9ac, 'name': SearchDatastore_Task, 'duration_secs': 0.00939} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.934935] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76eba7bc-26d3-4fc8-a67d-612212ce2ac6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.942904] env[62510]: DEBUG oslo_vmware.api [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Waiting for the task: (returnval){ [ 1518.942904] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5222a376-774b-1cd3-f31d-2d44a51c126e" [ 1518.942904] env[62510]: _type = "Task" [ 1518.942904] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.955982] env[62510]: DEBUG oslo_vmware.api [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5222a376-774b-1cd3-f31d-2d44a51c126e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.976198] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 58e71d67-aed2-4329-ab60-4dfacff1d0a2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1518.976561] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 0158d7af-d3bb-4d9c-a7c6-fbab943977e2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1518.976671] env[62510]: WARNING nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance aca56820-5a06-43dd-9d98-25421f7ef6a6 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1518.976870] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 7cc6d4a6-2765-44e7-b378-e213a562593d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1518.977184] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1519.054036] env[62510]: DEBUG nova.network.neutron [req-803af7ee-0e95-4fb3-afb5-18d82441334c req-7bedba22-2800-4021-9927-064ca54fc8d4 service nova] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Updated VIF entry in instance network info cache for port 62729ef9-4eb8-410c-94fc-a7cd602e845d. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1519.054158] env[62510]: DEBUG nova.network.neutron [req-803af7ee-0e95-4fb3-afb5-18d82441334c req-7bedba22-2800-4021-9927-064ca54fc8d4 service nova] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Updating instance_info_cache with network_info: [{"id": "62729ef9-4eb8-410c-94fc-a7cd602e845d", "address": "fa:16:3e:6a:fc:1c", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.252", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62729ef9-4e", "ovs_interfaceid": "62729ef9-4eb8-410c-94fc-a7cd602e845d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1519.185283] env[62510]: INFO nova.compute.manager [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Took 42.35 seconds to build instance. 
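The entries above show how the vmwareapi driver serializes work on the image cache: it takes a lock named after the cached VMDK path under devstack-image-cache_base, creates the cache folder if it is missing, then polls a SearchDatastore_Task before releasing the lock. A minimal sketch of that check-then-fetch pattern using oslo_concurrency.lockutils directly (the exists_fn/fetch_fn helpers are illustrative stand-ins, not nova's actual code):

    from oslo_concurrency import lockutils

    def ensure_cached_image(cache_vmdk_path, exists_fn, fetch_fn):
        # Serialize concurrent spawns that want the same cached image; the lock
        # name plays the role of the "[datastore1] devstack-image-cache_base/..."
        # locks acquired and released in the entries above.
        with lockutils.lock(cache_vmdk_path):
            if not exists_fn(cache_vmdk_path):    # e.g. a SearchDatastore_Task lookup
                fetch_fn(cache_vmdk_path)         # copy/download the image into the cache
        return cache_vmdk_path

Holding the lock across both the existence check and the fetch is what keeps two concurrent builds from populating the same cache entry twice.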
[ 1519.258914] env[62510]: DEBUG oslo_vmware.api [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Task: {'id': task-1768527, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083109} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.259297] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1519.260229] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a10f0d9d-5f50-4f96-b3d1-31259f6ea454 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.285648] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Reconfiguring VM instance instance-00000023 to attach disk [datastore1] a09a34de-fe7c-414b-8a89-2e9271c72a5c/a09a34de-fe7c-414b-8a89-2e9271c72a5c.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1519.286061] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c497dbd-521f-4903-bb73-6b3361c36f49 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.310440] env[62510]: DEBUG oslo_vmware.api [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768528, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.469994} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.311886] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 0158d7af-d3bb-4d9c-a7c6-fbab943977e2/0158d7af-d3bb-4d9c-a7c6-fbab943977e2.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1519.312319] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1519.312596] env[62510]: DEBUG oslo_vmware.api [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Waiting for the task: (returnval){ [ 1519.312596] env[62510]: value = "task-1768529" [ 1519.312596] env[62510]: _type = "Task" [ 1519.312596] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.312888] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f3254da6-4423-446f-8aca-c486d3dacabf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.323956] env[62510]: DEBUG oslo_vmware.api [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Task: {'id': task-1768529, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.325651] env[62510]: DEBUG oslo_vmware.api [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1519.325651] env[62510]: value = "task-1768530" [ 1519.325651] env[62510]: _type = "Task" [ 1519.325651] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.333846] env[62510]: DEBUG oslo_vmware.api [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768530, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.454336] env[62510]: DEBUG oslo_vmware.api [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5222a376-774b-1cd3-f31d-2d44a51c126e, 'name': SearchDatastore_Task, 'duration_secs': 0.012079} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.454641] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1519.455023] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 2d2ab209-8072-4e64-8170-50d96d71bc54/2d2ab209-8072-4e64-8170-50d96d71bc54.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1519.455311] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4fd4b719-7658-461d-91d6-7e6ec1adf586 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.462061] env[62510]: DEBUG oslo_vmware.api [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Waiting for the task: (returnval){ [ 1519.462061] env[62510]: value = "task-1768531" [ 1519.462061] env[62510]: _type = "Task" [ 1519.462061] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.471063] env[62510]: DEBUG oslo_vmware.api [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': task-1768531, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.481063] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1519.558100] env[62510]: DEBUG oslo_concurrency.lockutils [req-803af7ee-0e95-4fb3-afb5-18d82441334c req-7bedba22-2800-4021-9927-064ca54fc8d4 service nova] Releasing lock "refresh_cache-2d2ab209-8072-4e64-8170-50d96d71bc54" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1519.687803] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9c4598b8-711f-4147-80fa-9626b5b0d6f6 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "e9711202-67f3-4fe2-befb-f28722ddea33" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.172s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1519.825199] env[62510]: DEBUG oslo_vmware.api [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Task: {'id': task-1768529, 'name': ReconfigVM_Task, 'duration_secs': 0.450017} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.825522] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Reconfigured VM instance instance-00000023 to attach disk [datastore1] a09a34de-fe7c-414b-8a89-2e9271c72a5c/a09a34de-fe7c-414b-8a89-2e9271c72a5c.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1519.826251] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-09e48675-b95a-4f91-b299-8bb57af2f794 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.836506] env[62510]: DEBUG oslo_vmware.api [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768530, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.837990] env[62510]: DEBUG oslo_vmware.api [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Waiting for the task: (returnval){ [ 1519.837990] env[62510]: value = "task-1768532" [ 1519.837990] env[62510]: _type = "Task" [ 1519.837990] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.846166] env[62510]: DEBUG oslo_vmware.api [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Task: {'id': task-1768532, 'name': Rename_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.973591] env[62510]: DEBUG oslo_vmware.api [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': task-1768531, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.984607] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 568ce58c-9ce5-4b40-988f-f31d8e0c376d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1520.191151] env[62510]: DEBUG nova.compute.manager [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1520.281078] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2e6a41e1-595f-4d8f-898f-cb3b89fa2925 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "e9711202-67f3-4fe2-befb-f28722ddea33" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1520.281259] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2e6a41e1-595f-4d8f-898f-cb3b89fa2925 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "e9711202-67f3-4fe2-befb-f28722ddea33" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1520.281476] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2e6a41e1-595f-4d8f-898f-cb3b89fa2925 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "e9711202-67f3-4fe2-befb-f28722ddea33-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1520.281658] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2e6a41e1-595f-4d8f-898f-cb3b89fa2925 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "e9711202-67f3-4fe2-befb-f28722ddea33-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1520.281829] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2e6a41e1-595f-4d8f-898f-cb3b89fa2925 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "e9711202-67f3-4fe2-befb-f28722ddea33-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1520.285946] env[62510]: INFO nova.compute.manager [None req-2e6a41e1-595f-4d8f-898f-cb3b89fa2925 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Terminating instance [ 1520.335847] env[62510]: DEBUG oslo_vmware.api [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768530, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.348581] env[62510]: DEBUG oslo_vmware.api [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Task: {'id': task-1768532, 'name': Rename_Task, 'duration_secs': 0.19499} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1520.348903] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1520.349322] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9605dfdf-9557-4ac3-836a-88d32a52bbff {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.355386] env[62510]: DEBUG oslo_vmware.api [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Waiting for the task: (returnval){ [ 1520.355386] env[62510]: value = "task-1768533" [ 1520.355386] env[62510]: _type = "Task" [ 1520.355386] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.363427] env[62510]: DEBUG oslo_vmware.api [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Task: {'id': task-1768533, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.472389] env[62510]: DEBUG oslo_vmware.api [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': task-1768531, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.488378] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 3533a113-6f46-4b18-872d-9bc1b0481969 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1520.488701] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Total usable vcpus: 48, total allocated vcpus: 18 {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1520.488852] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4032MB phys_disk=200GB used_disk=18GB total_vcpus=48 used_vcpus=18 pci_stats=[] {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1520.713604] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1520.790569] env[62510]: DEBUG nova.compute.manager [None req-2e6a41e1-595f-4d8f-898f-cb3b89fa2925 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1520.790741] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2e6a41e1-595f-4d8f-898f-cb3b89fa2925 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1520.791895] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53a0627d-d6b3-47b0-abb8-7d0619bdd60a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.802925] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e6a41e1-595f-4d8f-898f-cb3b89fa2925 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1520.803190] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7bc380a4-c77c-44c9-be0f-4d0b77692b35 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.811627] env[62510]: DEBUG oslo_vmware.api [None req-2e6a41e1-595f-4d8f-898f-cb3b89fa2925 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1520.811627] env[62510]: value = "task-1768534" [ 1520.811627] env[62510]: _type = "Task" [ 1520.811627] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.820663] env[62510]: DEBUG oslo_vmware.api [None req-2e6a41e1-595f-4d8f-898f-cb3b89fa2925 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768534, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.838633] env[62510]: DEBUG oslo_vmware.api [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768530, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.868515] env[62510]: DEBUG oslo_vmware.api [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Task: {'id': task-1768533, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.976041] env[62510]: DEBUG oslo_vmware.api [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': task-1768531, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.983975] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a33b776f-6e0a-49dc-914a-c824c227399a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.991842] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf96433d-b9d6-43d7-a364-0fb158641a18 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.026050] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5220caab-a69a-4c6b-8bc3-bfdff3ff3514 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.034783] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29a3d4cf-64bf-46ba-a78f-dbe36451971d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.049282] env[62510]: DEBUG nova.compute.provider_tree [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1521.322459] env[62510]: DEBUG oslo_vmware.api [None req-2e6a41e1-595f-4d8f-898f-cb3b89fa2925 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768534, 'name': PowerOffVM_Task, 'duration_secs': 0.227178} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.322827] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e6a41e1-595f-4d8f-898f-cb3b89fa2925 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1521.323038] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2e6a41e1-595f-4d8f-898f-cb3b89fa2925 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1521.323323] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c1c84c0c-9c92-4d4b-86d2-be8869515352 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.336900] env[62510]: DEBUG oslo_vmware.api [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768530, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.365963] env[62510]: DEBUG oslo_vmware.api [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Task: {'id': task-1768533, 'name': PowerOnVM_Task, 'duration_secs': 0.614387} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.366264] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1521.366478] env[62510]: INFO nova.compute.manager [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Took 11.23 seconds to spawn the instance on the hypervisor. 
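The PowerOffVM_Task and PowerOnVM_Task entries in this stretch follow oslo.vmware's usual invoke-then-poll shape: the SOAP call returns a task reference immediately, and wait_for_task() polls it (the repeated _poll_task lines) until vCenter reports completion. A rough sketch of that pattern, assuming an already established oslo_vmware.api.VMwareAPISession named session and a VM managed-object reference vm_ref obtained elsewhere:

    def power_on(session, vm_ref):
        # Kick off the asynchronous vSphere task; the call returns a Task moref.
        task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
        # Poll until the task finishes; raises if vCenter reports it as failed.
        return session.wait_for_task(task)

The same invoke/wait pair sits behind the ExtendVirtualDisk_Task, CopyVirtualDisk_Task and Rename_Task progress lines scattered through this log.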
[ 1521.367303] env[62510]: DEBUG nova.compute.manager [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1521.368174] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66bb82fa-a959-44bf-8000-160a004f826f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.398602] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2e6a41e1-595f-4d8f-898f-cb3b89fa2925 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1521.398899] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2e6a41e1-595f-4d8f-898f-cb3b89fa2925 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1521.399149] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e6a41e1-595f-4d8f-898f-cb3b89fa2925 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Deleting the datastore file [datastore1] e9711202-67f3-4fe2-befb-f28722ddea33 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1521.399601] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-783c56c0-9a00-44ae-9787-424712c12c92 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.408383] env[62510]: DEBUG oslo_vmware.api [None req-2e6a41e1-595f-4d8f-898f-cb3b89fa2925 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1521.408383] env[62510]: value = "task-1768536" [ 1521.408383] env[62510]: _type = "Task" [ 1521.408383] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.420037] env[62510]: DEBUG oslo_vmware.api [None req-2e6a41e1-595f-4d8f-898f-cb3b89fa2925 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768536, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.476773] env[62510]: DEBUG oslo_vmware.api [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': task-1768531, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.552484] env[62510]: DEBUG nova.scheduler.client.report [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1521.837163] env[62510]: DEBUG oslo_vmware.api [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768530, 'name': ExtendVirtualDisk_Task, 'duration_secs': 2.489893} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.837452] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1521.838250] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8d12c86-1ad4-48a2-a403-a5cec77ca128 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.860542] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Reconfiguring VM instance instance-00000021 to attach disk [datastore1] 0158d7af-d3bb-4d9c-a7c6-fbab943977e2/0158d7af-d3bb-4d9c-a7c6-fbab943977e2.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1521.860838] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-84d538f6-e906-4395-bab4-8e1d407e15e0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.886225] env[62510]: DEBUG oslo_vmware.api [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1521.886225] env[62510]: value = "task-1768537" [ 1521.886225] env[62510]: _type = "Task" [ 1521.886225] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.893568] env[62510]: INFO nova.compute.manager [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Took 44.29 seconds to build instance. 
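The inventory record above is what the resource tracker reports to placement; the schedulable capacity of each resource class works out to (total - reserved) * allocation_ratio, which is how 48 physical vCPUs with a 4.0 allocation ratio comfortably carry the 18 vCPUs shown as allocated earlier in this stretch. A small worked check of the logged figures (plain Python, helper name is illustrative):

    INVENTORY = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    def capacity(inv):
        # Effective capacity that placement enforces for a resource class.
        return (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]

    for rc, inv in INVENTORY.items():
        print(rc, capacity(inv))    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0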
[ 1521.900146] env[62510]: DEBUG oslo_vmware.api [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768537, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.919456] env[62510]: DEBUG oslo_vmware.api [None req-2e6a41e1-595f-4d8f-898f-cb3b89fa2925 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768536, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.976246] env[62510]: DEBUG oslo_vmware.api [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': task-1768531, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.057497] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62510) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1522.057872] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 9.183s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1522.058125] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3c1fce38-0a93-4fe9-a054-8564bd67e248 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.337s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1522.058280] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3c1fce38-0a93-4fe9-a054-8564bd67e248 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1522.060724] env[62510]: DEBUG oslo_concurrency.lockutils [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.789s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1522.062258] env[62510]: INFO nova.compute.claims [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1522.065132] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62510) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1522.065282] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Cleaning up deleted instances with incomplete migration {{(pid=62510) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11647}} [ 1522.096661] env[62510]: INFO nova.scheduler.client.report [None req-3c1fce38-0a93-4fe9-a054-8564bd67e248 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Deleted allocations for instance 0a940fd0-73cc-403d-9afc-a989c67dfdef [ 1522.398502] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d19cff2b-6f34-497a-bdad-6fbfc5598f2d tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Lock "a09a34de-fe7c-414b-8a89-2e9271c72a5c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 86.476s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1522.398791] env[62510]: DEBUG oslo_vmware.api [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768537, 'name': ReconfigVM_Task, 'duration_secs': 0.494868} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.400096] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Reconfigured VM instance instance-00000021 to attach disk [datastore1] 0158d7af-d3bb-4d9c-a7c6-fbab943977e2/0158d7af-d3bb-4d9c-a7c6-fbab943977e2.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1522.402016] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-af107da8-102c-47ff-b54f-0755386fe507 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.408849] env[62510]: DEBUG oslo_vmware.api [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1522.408849] env[62510]: value = "task-1768538" [ 1522.408849] env[62510]: _type = "Task" [ 1522.408849] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.421333] env[62510]: DEBUG oslo_vmware.api [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768538, 'name': Rename_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.424740] env[62510]: DEBUG oslo_vmware.api [None req-2e6a41e1-595f-4d8f-898f-cb3b89fa2925 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768536, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.516227} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.424994] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e6a41e1-595f-4d8f-898f-cb3b89fa2925 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1522.425193] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2e6a41e1-595f-4d8f-898f-cb3b89fa2925 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1522.425500] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2e6a41e1-595f-4d8f-898f-cb3b89fa2925 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1522.425689] env[62510]: INFO nova.compute.manager [None req-2e6a41e1-595f-4d8f-898f-cb3b89fa2925 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Took 1.64 seconds to destroy the instance on the hypervisor. [ 1522.425936] env[62510]: DEBUG oslo.service.loopingcall [None req-2e6a41e1-595f-4d8f-898f-cb3b89fa2925 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1522.426395] env[62510]: DEBUG nova.compute.manager [-] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1522.426495] env[62510]: DEBUG nova.network.neutron [-] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1522.477624] env[62510]: DEBUG oslo_vmware.api [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': task-1768531, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.877089} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.480919] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 2d2ab209-8072-4e64-8170-50d96d71bc54/2d2ab209-8072-4e64-8170-50d96d71bc54.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1522.481071] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1522.484645] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-54e0b61b-07de-4f01-8d93-8e9c07587ef0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.493175] env[62510]: DEBUG oslo_vmware.api [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Waiting for the task: (returnval){ [ 1522.493175] env[62510]: value = "task-1768539" [ 1522.493175] env[62510]: _type = "Task" [ 1522.493175] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.501579] env[62510]: DEBUG oslo_vmware.api [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': task-1768539, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.611507] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3c1fce38-0a93-4fe9-a054-8564bd67e248 tempest-InstanceActionsTestJSON-994064216 tempest-InstanceActionsTestJSON-994064216-project-member] Lock "0a940fd0-73cc-403d-9afc-a989c67dfdef" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.838s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1522.712134] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1522.712134] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1522.712368] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Starting heal instance info cache {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 1522.903307] env[62510]: DEBUG nova.compute.manager [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1522.919457] env[62510]: DEBUG oslo_vmware.api [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768538, 'name': Rename_Task, 'duration_secs': 0.299665} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.919755] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1522.920008] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b32432a5-13de-40aa-8b76-f5bbad1fcf9d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.926513] env[62510]: DEBUG oslo_vmware.api [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1522.926513] env[62510]: value = "task-1768540" [ 1522.926513] env[62510]: _type = "Task" [ 1522.926513] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.935234] env[62510]: DEBUG oslo_vmware.api [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768540, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.971196] env[62510]: DEBUG nova.compute.manager [req-ae57a778-47fd-4b94-93bf-51f670e481c2 req-15f5d542-2361-47b8-8a5b-7a984d39a194 service nova] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Received event network-vif-deleted-c5e41d27-e1b7-40e9-9e95-de2a680aded2 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1522.971433] env[62510]: INFO nova.compute.manager [req-ae57a778-47fd-4b94-93bf-51f670e481c2 req-15f5d542-2361-47b8-8a5b-7a984d39a194 service nova] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Neutron deleted interface c5e41d27-e1b7-40e9-9e95-de2a680aded2; detaching it from the instance and deleting it from the info cache [ 1522.971674] env[62510]: DEBUG nova.network.neutron [req-ae57a778-47fd-4b94-93bf-51f670e481c2 req-15f5d542-2361-47b8-8a5b-7a984d39a194 service nova] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1523.004182] env[62510]: DEBUG oslo_vmware.api [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': task-1768539, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.163949} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.004182] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1523.006231] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e38eef6-9179-45e9-b34a-5bfb0a592ad7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.036195] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Reconfiguring VM instance instance-00000024 to attach disk [datastore1] 2d2ab209-8072-4e64-8170-50d96d71bc54/2d2ab209-8072-4e64-8170-50d96d71bc54.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1523.036561] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c006a5a2-86b7-4738-8db7-a9807fe01091 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.057513] env[62510]: DEBUG oslo_vmware.api [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Waiting for the task: (returnval){ [ 1523.057513] env[62510]: value = "task-1768541" [ 1523.057513] env[62510]: _type = "Task" [ 1523.057513] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.065918] env[62510]: DEBUG oslo_vmware.api [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': task-1768541, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.282571] env[62510]: DEBUG nova.network.neutron [-] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1523.377018] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d88c5bdb-e1d6-41aa-a1c5-e266840506ce tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Acquiring lock "a09a34de-fe7c-414b-8a89-2e9271c72a5c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1523.377018] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d88c5bdb-e1d6-41aa-a1c5-e266840506ce tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Lock "a09a34de-fe7c-414b-8a89-2e9271c72a5c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1523.377018] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d88c5bdb-e1d6-41aa-a1c5-e266840506ce tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Acquiring lock "a09a34de-fe7c-414b-8a89-2e9271c72a5c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1523.377018] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d88c5bdb-e1d6-41aa-a1c5-e266840506ce tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Lock "a09a34de-fe7c-414b-8a89-2e9271c72a5c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1523.377018] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d88c5bdb-e1d6-41aa-a1c5-e266840506ce tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Lock "a09a34de-fe7c-414b-8a89-2e9271c72a5c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1523.384049] env[62510]: INFO nova.compute.manager [None req-d88c5bdb-e1d6-41aa-a1c5-e266840506ce tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Terminating instance [ 1523.431955] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 
tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1523.438535] env[62510]: DEBUG oslo_vmware.api [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768540, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.481944] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4038ede8-0690-4e35-a171-7b5ebea47e73 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.497235] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f16c4cb-2e10-4569-9573-6a1b30db4989 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.538744] env[62510]: DEBUG nova.compute.manager [req-ae57a778-47fd-4b94-93bf-51f670e481c2 req-15f5d542-2361-47b8-8a5b-7a984d39a194 service nova] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Detach interface failed, port_id=c5e41d27-e1b7-40e9-9e95-de2a680aded2, reason: Instance e9711202-67f3-4fe2-befb-f28722ddea33 could not be found. {{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1523.572298] env[62510]: DEBUG oslo_vmware.api [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': task-1768541, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.671949] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-299238e6-6aab-4157-a375-12478157581e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.683713] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5968c51-cdce-4461-bc7b-5a8725302c44 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.716257] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e4fd5b0-9f28-4f47-9165-da110c75870e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.727372] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccf77cce-8b3b-49ba-8a62-6293c2eccf0e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.744506] env[62510]: DEBUG nova.compute.provider_tree [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1523.788034] env[62510]: INFO nova.compute.manager [-] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Took 1.36 seconds to deallocate network for instance. [ 1523.805818] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "refresh_cache-12768001-6ed0-47be-8f20-c59ee82b842a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1523.805818] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquired lock "refresh_cache-12768001-6ed0-47be-8f20-c59ee82b842a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1523.805905] env[62510]: DEBUG nova.network.neutron [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Forcefully refreshing network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1523.888304] env[62510]: DEBUG nova.compute.manager [None req-d88c5bdb-e1d6-41aa-a1c5-e266840506ce tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1523.888613] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d88c5bdb-e1d6-41aa-a1c5-e266840506ce tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1523.889730] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92dd7b95-9747-4af2-81ba-443302862a65 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.900415] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d88c5bdb-e1d6-41aa-a1c5-e266840506ce tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1523.900695] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3fee1896-0077-4427-b5e2-07a85909f911 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.908308] env[62510]: DEBUG oslo_vmware.api [None req-d88c5bdb-e1d6-41aa-a1c5-e266840506ce tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Waiting for the task: (returnval){ [ 1523.908308] env[62510]: value = "task-1768542" [ 1523.908308] env[62510]: _type = "Task" [ 1523.908308] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.916684] env[62510]: DEBUG oslo_vmware.api [None req-d88c5bdb-e1d6-41aa-a1c5-e266840506ce tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Task: {'id': task-1768542, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.939563] env[62510]: DEBUG oslo_vmware.api [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768540, 'name': PowerOnVM_Task, 'duration_secs': 0.807404} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.939876] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1523.940784] env[62510]: DEBUG nova.compute.manager [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1523.941940] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5a96fb7-2a2e-4113-bec0-3f8ddcfbc4ca {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.068208] env[62510]: DEBUG oslo_vmware.api [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': task-1768541, 'name': ReconfigVM_Task, 'duration_secs': 0.585172} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.068510] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Reconfigured VM instance instance-00000024 to attach disk [datastore1] 2d2ab209-8072-4e64-8170-50d96d71bc54/2d2ab209-8072-4e64-8170-50d96d71bc54.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1524.069137] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2f35cfa5-1da4-43c0-8843-b048a4c4db84 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.076408] env[62510]: DEBUG oslo_vmware.api [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Waiting for the task: (returnval){ [ 1524.076408] env[62510]: value = "task-1768543" [ 1524.076408] env[62510]: _type = "Task" [ 1524.076408] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.083913] env[62510]: DEBUG oslo_vmware.api [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': task-1768543, 'name': Rename_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.247945] env[62510]: DEBUG nova.scheduler.client.report [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1524.297609] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2e6a41e1-595f-4d8f-898f-cb3b89fa2925 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1524.418820] env[62510]: DEBUG oslo_vmware.api [None req-d88c5bdb-e1d6-41aa-a1c5-e266840506ce tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Task: {'id': task-1768542, 'name': PowerOffVM_Task, 'duration_secs': 0.422527} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.418820] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d88c5bdb-e1d6-41aa-a1c5-e266840506ce tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1524.418993] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d88c5bdb-e1d6-41aa-a1c5-e266840506ce tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1524.419264] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-064d22c3-2768-46f3-ad2c-35a230a9e4e6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.437479] env[62510]: DEBUG oslo_vmware.rw_handles [None req-203ebdd3-1191-4685-a4e8-e44e66fba96b tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521c2824-d5a2-cec8-2445-0bb9834f25e0/disk-0.vmdk. 
{{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1524.438539] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07971fcf-dfe1-428f-a33e-701c9152490c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.444834] env[62510]: DEBUG oslo_vmware.rw_handles [None req-203ebdd3-1191-4685-a4e8-e44e66fba96b tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521c2824-d5a2-cec8-2445-0bb9834f25e0/disk-0.vmdk is in state: ready. {{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1524.444834] env[62510]: ERROR oslo_vmware.rw_handles [None req-203ebdd3-1191-4685-a4e8-e44e66fba96b tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521c2824-d5a2-cec8-2445-0bb9834f25e0/disk-0.vmdk due to incomplete transfer. [ 1524.445173] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-57d8459d-cc02-458d-83eb-4ce61f7b8b79 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.451791] env[62510]: DEBUG oslo_vmware.rw_handles [None req-203ebdd3-1191-4685-a4e8-e44e66fba96b tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/521c2824-d5a2-cec8-2445-0bb9834f25e0/disk-0.vmdk. {{(pid=62510) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1524.451989] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-203ebdd3-1191-4685-a4e8-e44e66fba96b tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Uploaded image 8fef84d7-72c6-43e1-819e-80c97d1b7e1b to the Glance image server {{(pid=62510) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1524.454152] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-203ebdd3-1191-4685-a4e8-e44e66fba96b tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Destroying the VM {{(pid=62510) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1524.457892] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-b0986d29-2168-4bd5-a2c6-9536309851fb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.465089] env[62510]: DEBUG oslo_vmware.api [None req-203ebdd3-1191-4685-a4e8-e44e66fba96b tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Waiting for the task: (returnval){ [ 1524.465089] env[62510]: value = "task-1768545" [ 1524.465089] env[62510]: _type = "Task" [ 1524.465089] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.465089] env[62510]: DEBUG oslo_concurrency.lockutils [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1524.472623] env[62510]: DEBUG oslo_vmware.api [None req-203ebdd3-1191-4685-a4e8-e44e66fba96b tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768545, 'name': Destroy_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.551063] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d88c5bdb-e1d6-41aa-a1c5-e266840506ce tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1524.551442] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d88c5bdb-e1d6-41aa-a1c5-e266840506ce tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1524.551442] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-d88c5bdb-e1d6-41aa-a1c5-e266840506ce tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Deleting the datastore file [datastore1] a09a34de-fe7c-414b-8a89-2e9271c72a5c {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1524.551561] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-deb6ad94-bd97-49a8-8270-303457cca528 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.559022] env[62510]: DEBUG oslo_vmware.api [None req-d88c5bdb-e1d6-41aa-a1c5-e266840506ce tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Waiting for the task: (returnval){ [ 1524.559022] env[62510]: value = "task-1768546" [ 1524.559022] env[62510]: _type = "Task" [ 1524.559022] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.568073] env[62510]: DEBUG oslo_vmware.api [None req-d88c5bdb-e1d6-41aa-a1c5-e266840506ce tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Task: {'id': task-1768546, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.586581] env[62510]: DEBUG oslo_vmware.api [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': task-1768543, 'name': Rename_Task, 'duration_secs': 0.268876} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.586869] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1524.587236] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-75d8dda2-7062-428d-bfa0-63ae4df8ffd4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.593358] env[62510]: DEBUG oslo_vmware.api [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Waiting for the task: (returnval){ [ 1524.593358] env[62510]: value = "task-1768547" [ 1524.593358] env[62510]: _type = "Task" [ 1524.593358] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.601545] env[62510]: DEBUG oslo_vmware.api [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': task-1768547, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.754024] env[62510]: DEBUG oslo_concurrency.lockutils [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.693s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1524.754527] env[62510]: DEBUG nova.compute.manager [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1524.757274] env[62510]: DEBUG oslo_concurrency.lockutils [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.697s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1524.758801] env[62510]: INFO nova.compute.claims [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1524.987206] env[62510]: DEBUG oslo_vmware.api [None req-203ebdd3-1191-4685-a4e8-e44e66fba96b tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768545, 'name': Destroy_Task} progress is 33%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.072618] env[62510]: DEBUG oslo_vmware.api [None req-d88c5bdb-e1d6-41aa-a1c5-e266840506ce tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Task: {'id': task-1768546, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.175017} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1525.072618] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-d88c5bdb-e1d6-41aa-a1c5-e266840506ce tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1525.072618] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d88c5bdb-e1d6-41aa-a1c5-e266840506ce tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1525.072618] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d88c5bdb-e1d6-41aa-a1c5-e266840506ce tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1525.072618] env[62510]: INFO nova.compute.manager [None req-d88c5bdb-e1d6-41aa-a1c5-e266840506ce tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1525.072618] env[62510]: DEBUG oslo.service.loopingcall [None req-d88c5bdb-e1d6-41aa-a1c5-e266840506ce tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1525.072618] env[62510]: DEBUG nova.compute.manager [-] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1525.072618] env[62510]: DEBUG nova.network.neutron [-] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1525.104329] env[62510]: DEBUG oslo_vmware.api [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': task-1768547, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.270168] env[62510]: DEBUG nova.network.neutron [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Updating instance_info_cache with network_info: [{"id": "7489ebb6-ec5f-4097-9a62-81a2d3dedd52", "address": "fa:16:3e:81:65:65", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.149", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7489ebb6-ec", "ovs_interfaceid": "7489ebb6-ec5f-4097-9a62-81a2d3dedd52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1525.274973] env[62510]: DEBUG nova.compute.utils [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1525.274973] env[62510]: DEBUG nova.compute.manager [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1525.277221] env[62510]: DEBUG nova.network.neutron [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1525.336156] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ffaedeeb-ed18-4a73-af91-196101df9349 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "0158d7af-d3bb-4d9c-a7c6-fbab943977e2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1525.336517] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ffaedeeb-ed18-4a73-af91-196101df9349 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "0158d7af-d3bb-4d9c-a7c6-fbab943977e2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1525.336734] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ffaedeeb-ed18-4a73-af91-196101df9349 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "0158d7af-d3bb-4d9c-a7c6-fbab943977e2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1525.336945] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ffaedeeb-ed18-4a73-af91-196101df9349 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "0158d7af-d3bb-4d9c-a7c6-fbab943977e2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1525.337209] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ffaedeeb-ed18-4a73-af91-196101df9349 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "0158d7af-d3bb-4d9c-a7c6-fbab943977e2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1525.340574] env[62510]: INFO nova.compute.manager [None req-ffaedeeb-ed18-4a73-af91-196101df9349 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Terminating instance [ 1525.373643] env[62510]: DEBUG nova.policy [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e0b465ab9caf4d989219f1fbbebd00ce', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd98518565b744451ba90ba301267213f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 
'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1525.478593] env[62510]: DEBUG oslo_vmware.api [None req-203ebdd3-1191-4685-a4e8-e44e66fba96b tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768545, 'name': Destroy_Task, 'duration_secs': 0.801379} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1525.478894] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-203ebdd3-1191-4685-a4e8-e44e66fba96b tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Destroyed the VM [ 1525.481127] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-203ebdd3-1191-4685-a4e8-e44e66fba96b tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Deleting Snapshot of the VM instance {{(pid=62510) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1525.481127] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-0e97dcfd-a131-4702-9bac-0d8f8d0d02d2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.488224] env[62510]: DEBUG oslo_vmware.api [None req-203ebdd3-1191-4685-a4e8-e44e66fba96b tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Waiting for the task: (returnval){ [ 1525.488224] env[62510]: value = "task-1768548" [ 1525.488224] env[62510]: _type = "Task" [ 1525.488224] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.500163] env[62510]: DEBUG oslo_vmware.api [None req-203ebdd3-1191-4685-a4e8-e44e66fba96b tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768548, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.609618] env[62510]: DEBUG oslo_vmware.api [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': task-1768547, 'name': PowerOnVM_Task, 'duration_secs': 0.91172} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1525.609970] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1525.610566] env[62510]: INFO nova.compute.manager [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Took 10.70 seconds to spawn the instance on the hypervisor. 
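[editor's note, illustrative sketch only] The PowerOnVM_Task / Rename_Task entries above are produced by the oslo.vmware session that the VMwareVCDriver uses to submit vCenter tasks and poll them until completion. The snippet below is a minimal, hedged sketch of that pattern; the host, credentials, and the power_on helper are placeholders invented for illustration, not values taken from this log or from Nova's source.

from oslo_vmware import api

# Hypothetical connection parameters; connecting creates a vCenter session
# like the "Successfully established new session" line earlier in this log.
session = api.VMwareAPISession(
    'vcenter.example.org',           # hypothetical vCenter host
    'administrator@vsphere.local',   # hypothetical user
    'secret',                        # hypothetical password
    api_retry_count=10,
    task_poll_interval=0.5)

def power_on(vm_ref):
    """Start a VM and block until vCenter reports the task finished."""
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task() polls the task object every task_poll_interval seconds,
    # emitting the "progress is N%" / "completed successfully" debug lines
    # seen above, and raises if the task ends in an error state.
    return session.wait_for_task(task)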
[ 1525.614019] env[62510]: DEBUG nova.compute.manager [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1525.614019] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-707e1f5f-60a0-478a-acb8-58a956f53230 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.774992] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Releasing lock "refresh_cache-12768001-6ed0-47be-8f20-c59ee82b842a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1525.775247] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Updated the network info_cache for instance {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10380}} [ 1525.779583] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1525.780148] env[62510]: DEBUG nova.compute.manager [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1525.782462] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1525.782704] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1525.783187] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1525.783371] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1525.783926] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1525.783926] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62510) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 1525.783926] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1525.848561] env[62510]: DEBUG nova.compute.manager [None req-ffaedeeb-ed18-4a73-af91-196101df9349 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1525.848794] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ffaedeeb-ed18-4a73-af91-196101df9349 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1525.849997] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed2d304e-5bef-43e9-a80e-0e8749de4c21 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.860034] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffaedeeb-ed18-4a73-af91-196101df9349 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1525.860289] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-071acdde-0823-4d1a-8a9f-a5551d3c4862 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.870582] env[62510]: DEBUG oslo_vmware.api [None req-ffaedeeb-ed18-4a73-af91-196101df9349 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1525.870582] env[62510]: value = "task-1768549" [ 1525.870582] env[62510]: _type = "Task" [ 1525.870582] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.879520] env[62510]: DEBUG nova.compute.manager [req-352d51e9-259c-436e-a41e-80cbf54acbce req-af51453d-6ebc-4d29-b3fa-ba1bc418754d service nova] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Received event network-vif-deleted-0d218053-8e39-4829-a3ad-5837c5fbb1e2 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1525.879869] env[62510]: INFO nova.compute.manager [req-352d51e9-259c-436e-a41e-80cbf54acbce req-af51453d-6ebc-4d29-b3fa-ba1bc418754d service nova] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Neutron deleted interface 0d218053-8e39-4829-a3ad-5837c5fbb1e2; detaching it from the instance and deleting it from the info cache [ 1525.880056] env[62510]: DEBUG nova.network.neutron [req-352d51e9-259c-436e-a41e-80cbf54acbce req-af51453d-6ebc-4d29-b3fa-ba1bc418754d service nova] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1525.884484] env[62510]: DEBUG oslo_vmware.api [None req-ffaedeeb-ed18-4a73-af91-196101df9349 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768549, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.887251] env[62510]: DEBUG nova.network.neutron [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Successfully created port: 6a83554a-ba95-455d-8a32-16fec19f8c40 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1525.998538] env[62510]: DEBUG oslo_vmware.api [None req-203ebdd3-1191-4685-a4e8-e44e66fba96b tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768548, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.137647] env[62510]: INFO nova.compute.manager [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Took 43.44 seconds to build instance. 
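[editor's note, illustrative sketch only] The repeated "Acquiring lock ... by ...", "Lock ... acquired ... waited", and "released ... held" lines above come from oslo.concurrency's lockutils wrapper. The sketch below shows the generic pattern under the assumption of a placeholder function; the lock name reuses "compute_resources" from the log, but the claim() function is not Nova's resource tracker.

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim(instance_uuid):
    # The body runs while the in-process "compute_resources" lock is held;
    # lockutils debug-logs who acquired the lock, how long the caller waited,
    # and how long it was held, which is what the timings in the log report.
    return instance_uuid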
[ 1526.138753] env[62510]: DEBUG nova.network.neutron [-] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1526.177793] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "350d5f83-d9ce-4997-bf57-70c4a4e22ba0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1526.178163] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "350d5f83-d9ce-4997-bf57-70c4a4e22ba0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1526.292247] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1526.373507] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a20ce3a5-f283-45e5-908d-d7a7317c633a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.388453] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ecc62c42-b37a-4cf3-a1ef-1c499a1b4191 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.391370] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1d01ad6-4352-40e6-9319-5cef36bb8851 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.395040] env[62510]: DEBUG oslo_vmware.api [None req-ffaedeeb-ed18-4a73-af91-196101df9349 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768549, 'name': PowerOffVM_Task, 'duration_secs': 0.228317} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.395608] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffaedeeb-ed18-4a73-af91-196101df9349 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1526.396313] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ffaedeeb-ed18-4a73-af91-196101df9349 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1526.397428] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e6e71ab1-fdb4-4b0c-9439-70cd58baacf7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.435340] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-018ca51f-c561-46c7-ac2e-9d8c81a541b0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.441592] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f11b6d71-ca9e-4e1d-bf80-66a35ad0431a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.461134] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe32c1fa-e1b2-4f33-8527-b95741791d8d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.479982] env[62510]: DEBUG nova.compute.manager [req-352d51e9-259c-436e-a41e-80cbf54acbce req-af51453d-6ebc-4d29-b3fa-ba1bc418754d service nova] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Detach interface failed, port_id=0d218053-8e39-4829-a3ad-5837c5fbb1e2, reason: Instance a09a34de-fe7c-414b-8a89-2e9271c72a5c could not be found. {{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1526.490612] env[62510]: DEBUG nova.compute.provider_tree [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1526.500286] env[62510]: DEBUG oslo_vmware.api [None req-203ebdd3-1191-4685-a4e8-e44e66fba96b tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768548, 'name': RemoveSnapshot_Task} progress is 30%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.641083] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5c978b9f-58f3-40c6-82ee-4dd67254a7fb tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Lock "2d2ab209-8072-4e64-8170-50d96d71bc54" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 89.833s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1526.641561] env[62510]: INFO nova.compute.manager [-] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Took 1.57 seconds to deallocate network for instance. [ 1526.795099] env[62510]: DEBUG nova.compute.manager [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1526.823557] env[62510]: DEBUG nova.virt.hardware [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1526.823832] env[62510]: DEBUG nova.virt.hardware [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1526.823832] env[62510]: DEBUG nova.virt.hardware [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1526.823994] env[62510]: DEBUG nova.virt.hardware [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1526.824256] env[62510]: DEBUG nova.virt.hardware [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1526.824349] env[62510]: DEBUG nova.virt.hardware [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Chose sockets=0, cores=0, 
threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1526.824492] env[62510]: DEBUG nova.virt.hardware [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1526.824894] env[62510]: DEBUG nova.virt.hardware [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1526.824894] env[62510]: DEBUG nova.virt.hardware [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1526.825023] env[62510]: DEBUG nova.virt.hardware [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1526.825227] env[62510]: DEBUG nova.virt.hardware [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1526.826231] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d1a1ffe-48f1-483a-958b-35e7fc983e7e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.834660] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbbcea8c-52e7-4d73-ba01-fdfa9c11831d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.999769] env[62510]: DEBUG nova.scheduler.client.report [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1527.003014] env[62510]: DEBUG oslo_vmware.api [None req-203ebdd3-1191-4685-a4e8-e44e66fba96b tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768548, 'name': RemoveSnapshot_Task, 'duration_secs': 1.054167} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1527.003508] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-203ebdd3-1191-4685-a4e8-e44e66fba96b tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Deleted Snapshot of the VM instance {{(pid=62510) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1527.003755] env[62510]: INFO nova.compute.manager [None req-203ebdd3-1191-4685-a4e8-e44e66fba96b tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Took 15.43 seconds to snapshot the instance on the hypervisor. [ 1527.145380] env[62510]: DEBUG nova.compute.manager [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1527.150767] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d88c5bdb-e1d6-41aa-a1c5-e266840506ce tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1527.505196] env[62510]: DEBUG oslo_concurrency.lockutils [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.748s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1527.505775] env[62510]: DEBUG nova.compute.manager [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1527.510478] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.402s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1527.512094] env[62510]: INFO nova.compute.claims [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1527.570642] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6f19d27-3db2-44e7-9490-2e695c9ecda5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.581194] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d5c77544-d014-418a-a4a7-e6f7d9e7c035 tempest-ServersAdminNegativeTestJSON-275789760 tempest-ServersAdminNegativeTestJSON-275789760-project-admin] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Suspending the VM {{(pid=62510) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1527.581194] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-3b196c28-b7ea-498b-a10d-9f8830c5f917 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.590017] env[62510]: DEBUG oslo_vmware.api [None req-d5c77544-d014-418a-a4a7-e6f7d9e7c035 tempest-ServersAdminNegativeTestJSON-275789760 tempest-ServersAdminNegativeTestJSON-275789760-project-admin] Waiting for the task: (returnval){ [ 1527.590017] env[62510]: value = "task-1768551" [ 1527.590017] env[62510]: _type = "Task" [ 1527.590017] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1527.596502] env[62510]: DEBUG oslo_vmware.api [None req-d5c77544-d014-418a-a4a7-e6f7d9e7c035 tempest-ServersAdminNegativeTestJSON-275789760 tempest-ServersAdminNegativeTestJSON-275789760-project-admin] Task: {'id': task-1768551, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.674477] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1528.019653] env[62510]: DEBUG nova.compute.utils [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1528.023020] env[62510]: DEBUG nova.compute.manager [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1528.023203] env[62510]: DEBUG nova.network.neutron [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1528.102437] env[62510]: DEBUG oslo_vmware.api [None req-d5c77544-d014-418a-a4a7-e6f7d9e7c035 tempest-ServersAdminNegativeTestJSON-275789760 tempest-ServersAdminNegativeTestJSON-275789760-project-admin] Task: {'id': task-1768551, 'name': SuspendVM_Task} progress is 66%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.108304] env[62510]: DEBUG nova.policy [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f8223ce0f63d477ba38653abf5992eb2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fc4a16c9d5d346489a9c8efec041df23', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1528.179717] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ffaedeeb-ed18-4a73-af91-196101df9349 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1528.179717] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ffaedeeb-ed18-4a73-af91-196101df9349 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1528.179717] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-ffaedeeb-ed18-4a73-af91-196101df9349 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Deleting the datastore file [datastore1] 0158d7af-d3bb-4d9c-a7c6-fbab943977e2 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1528.180068] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-60b61b84-3e79-4c34-9fb6-0acce72beb41 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.189069] env[62510]: DEBUG oslo_vmware.api [None req-ffaedeeb-ed18-4a73-af91-196101df9349 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1528.189069] env[62510]: value = "task-1768552" [ 1528.189069] env[62510]: _type = "Task" [ 1528.189069] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.197224] env[62510]: DEBUG oslo_vmware.api [None req-ffaedeeb-ed18-4a73-af91-196101df9349 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768552, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.313042] env[62510]: DEBUG nova.network.neutron [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Successfully updated port: 6a83554a-ba95-455d-8a32-16fec19f8c40 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1528.524432] env[62510]: DEBUG nova.compute.manager [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1528.564696] env[62510]: DEBUG nova.compute.manager [req-4004807c-1d08-4f10-9f1f-62d72ff9bd89 req-742ee635-e5e4-4a79-9d47-b53a8be396a1 service nova] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Received event network-vif-plugged-6a83554a-ba95-455d-8a32-16fec19f8c40 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1528.565779] env[62510]: DEBUG oslo_concurrency.lockutils [req-4004807c-1d08-4f10-9f1f-62d72ff9bd89 req-742ee635-e5e4-4a79-9d47-b53a8be396a1 service nova] Acquiring lock "58e71d67-aed2-4329-ab60-4dfacff1d0a2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1528.566684] env[62510]: DEBUG oslo_concurrency.lockutils [req-4004807c-1d08-4f10-9f1f-62d72ff9bd89 req-742ee635-e5e4-4a79-9d47-b53a8be396a1 service nova] Lock "58e71d67-aed2-4329-ab60-4dfacff1d0a2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1528.567026] env[62510]: DEBUG oslo_concurrency.lockutils [req-4004807c-1d08-4f10-9f1f-62d72ff9bd89 req-742ee635-e5e4-4a79-9d47-b53a8be396a1 service nova] Lock "58e71d67-aed2-4329-ab60-4dfacff1d0a2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1528.567786] env[62510]: DEBUG nova.compute.manager [req-4004807c-1d08-4f10-9f1f-62d72ff9bd89 req-742ee635-e5e4-4a79-9d47-b53a8be396a1 service nova] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] No waiting events found dispatching network-vif-plugged-6a83554a-ba95-455d-8a32-16fec19f8c40 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1528.567786] env[62510]: WARNING nova.compute.manager [req-4004807c-1d08-4f10-9f1f-62d72ff9bd89 req-742ee635-e5e4-4a79-9d47-b53a8be396a1 service nova] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Received unexpected event network-vif-plugged-6a83554a-ba95-455d-8a32-16fec19f8c40 for instance with vm_state building and task_state spawning. [ 1528.611900] env[62510]: DEBUG oslo_vmware.api [None req-d5c77544-d014-418a-a4a7-e6f7d9e7c035 tempest-ServersAdminNegativeTestJSON-275789760 tempest-ServersAdminNegativeTestJSON-275789760-project-admin] Task: {'id': task-1768551, 'name': SuspendVM_Task, 'duration_secs': 0.652386} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1528.612548] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d5c77544-d014-418a-a4a7-e6f7d9e7c035 tempest-ServersAdminNegativeTestJSON-275789760 tempest-ServersAdminNegativeTestJSON-275789760-project-admin] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Suspended the VM {{(pid=62510) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1528.612875] env[62510]: DEBUG nova.compute.manager [None req-d5c77544-d014-418a-a4a7-e6f7d9e7c035 tempest-ServersAdminNegativeTestJSON-275789760 tempest-ServersAdminNegativeTestJSON-275789760-project-admin] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1528.613994] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef5de090-8c4c-4cb1-9108-49805ad85d63 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.661260] env[62510]: DEBUG nova.network.neutron [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Successfully created port: c2d4507d-2fd6-466d-9025-685dbebc79f3 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1528.701102] env[62510]: DEBUG oslo_vmware.api [None req-ffaedeeb-ed18-4a73-af91-196101df9349 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768552, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154582} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1528.701192] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-ffaedeeb-ed18-4a73-af91-196101df9349 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1528.701428] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ffaedeeb-ed18-4a73-af91-196101df9349 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1528.701612] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ffaedeeb-ed18-4a73-af91-196101df9349 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1528.701787] env[62510]: INFO nova.compute.manager [None req-ffaedeeb-ed18-4a73-af91-196101df9349 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Took 2.85 seconds to destroy the instance on the hypervisor. 
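[editor's note] The SuspendVM_Task and DeleteDatastoreFile_Task entries above follow oslo.vmware's invoke-then-poll pattern: the driver issues the asynchronous vSphere call through the shared VMwareAPISession, then blocks in wait_for_task(), which polls the returned task object and produces the "progress is N%" and "completed successfully" lines recorded here. The following is a minimal standalone sketch of that pattern only, not Nova's own code; the vCenter address, credentials, and VM managed-object id are placeholders, not values taken from this log.

# Sketch: reproduce the invoke/poll sequence seen in the log with oslo.vmware.
from oslo_vmware import api
from oslo_vmware import vim_util

# Establish a vCenter session (placeholder host and credentials).
session = api.VMwareAPISession(
    'vc.example.test',
    'administrator@vsphere.local',
    'secret',
    api_retry_count=10,
    task_poll_interval=0.5)   # how often the task is polled, i.e. how often
                              # a "progress is N%" DEBUG line can appear

# Managed-object reference of the VM to suspend (placeholder id).
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# Issue the asynchronous vSphere call; this logs "Invoking
# VirtualMachine.SuspendVM_Task ..." and returns a Task moref immediately.
task = session.invoke_api(session.vim, 'SuspendVM_Task', vm_ref)

# Block until the task finishes; polling logs progress and the final
# "completed successfully" entry, and an error state raises an exception.
session.wait_for_task(task)

Nova's vmwareapi driver wraps these same calls in its vm_util/vmops helpers, which is why the log interleaves nova.virt.vmwareapi entries with the raw oslo_vmware.service and oslo_vmware.api lines.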
[ 1528.702043] env[62510]: DEBUG oslo.service.loopingcall [None req-ffaedeeb-ed18-4a73-af91-196101df9349 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1528.703502] env[62510]: DEBUG nova.compute.manager [-] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1528.703502] env[62510]: DEBUG nova.network.neutron [-] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1528.815902] env[62510]: DEBUG oslo_concurrency.lockutils [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquiring lock "refresh_cache-58e71d67-aed2-4329-ab60-4dfacff1d0a2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1528.816063] env[62510]: DEBUG oslo_concurrency.lockutils [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquired lock "refresh_cache-58e71d67-aed2-4329-ab60-4dfacff1d0a2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1528.816222] env[62510]: DEBUG nova.network.neutron [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1529.036879] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Acquiring lock "e7daad63-c802-4a86-bead-7e849064ed61" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1529.037137] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Lock "e7daad63-c802-4a86-bead-7e849064ed61" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1529.139516] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-378648b9-ae40-4110-8012-c15c811dcfed {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.148121] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c44850b7-22a0-4a0a-8024-953673ef49e1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.183199] env[62510]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-965bc151-620d-4523-bd45-1d281f983653 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.192115] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d29f7a5-5341-4c17-860f-e35422db4775 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.214039] env[62510]: DEBUG nova.compute.provider_tree [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1529.226041] env[62510]: DEBUG nova.compute.manager [None req-30362c47-ddbb-4616-b105-bbfb04080fb6 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1529.228425] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb57a9ef-83b7-46c0-9015-4f22a70e0246 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.368554] env[62510]: DEBUG nova.network.neutron [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1529.543952] env[62510]: DEBUG nova.compute.manager [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1529.575127] env[62510]: DEBUG nova.virt.hardware [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1529.575127] env[62510]: DEBUG nova.virt.hardware [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1529.575127] env[62510]: DEBUG nova.virt.hardware [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1529.575127] env[62510]: DEBUG nova.virt.hardware [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1529.575127] env[62510]: DEBUG nova.virt.hardware [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1529.575127] env[62510]: DEBUG nova.virt.hardware [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1529.578660] env[62510]: DEBUG nova.virt.hardware [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1529.578660] env[62510]: DEBUG nova.virt.hardware [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1529.578660] 
env[62510]: DEBUG nova.virt.hardware [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1529.578660] env[62510]: DEBUG nova.virt.hardware [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1529.578660] env[62510]: DEBUG nova.virt.hardware [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1529.578660] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-593c0c5b-6a92-42a5-b7e2-f0669055f2a0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.586863] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5849d5f0-93ab-4d1f-8fa4-c26b84f630b9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.613139] env[62510]: DEBUG nova.network.neutron [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Updating instance_info_cache with network_info: [{"id": "6a83554a-ba95-455d-8a32-16fec19f8c40", "address": "fa:16:3e:18:8a:b5", "network": {"id": "925f8c0b-2409-4eca-9a68-c5b357835972", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2008838096-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d98518565b744451ba90ba301267213f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4e52d8a-b086-4333-a5a1-938680a2d2bd", "external-id": "nsx-vlan-transportzone-973", "segmentation_id": 973, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a83554a-ba", "ovs_interfaceid": "6a83554a-ba95-455d-8a32-16fec19f8c40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1529.717666] env[62510]: DEBUG nova.scheduler.client.report [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1529.736458] env[62510]: DEBUG nova.network.neutron [-] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1529.747753] env[62510]: INFO nova.compute.manager [None req-30362c47-ddbb-4616-b105-bbfb04080fb6 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] instance snapshotting [ 1529.750369] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f22b6f44-df2a-4d24-aded-20810dccc0f8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.755180] env[62510]: DEBUG oslo_concurrency.lockutils [None req-294dbbf3-95f1-4d8d-87d7-d788f242b26d tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Acquiring lock "cfe53f9c-d78b-4af7-b991-f3549c03f22d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1529.755427] env[62510]: DEBUG oslo_concurrency.lockutils [None req-294dbbf3-95f1-4d8d-87d7-d788f242b26d tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Lock "cfe53f9c-d78b-4af7-b991-f3549c03f22d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1529.755637] env[62510]: DEBUG oslo_concurrency.lockutils [None req-294dbbf3-95f1-4d8d-87d7-d788f242b26d tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Acquiring lock "cfe53f9c-d78b-4af7-b991-f3549c03f22d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1529.755839] env[62510]: DEBUG oslo_concurrency.lockutils [None req-294dbbf3-95f1-4d8d-87d7-d788f242b26d tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Lock "cfe53f9c-d78b-4af7-b991-f3549c03f22d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1529.756018] env[62510]: DEBUG oslo_concurrency.lockutils [None req-294dbbf3-95f1-4d8d-87d7-d788f242b26d tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Lock "cfe53f9c-d78b-4af7-b991-f3549c03f22d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1529.771419] env[62510]: INFO nova.compute.manager [None req-294dbbf3-95f1-4d8d-87d7-d788f242b26d tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Terminating instance [ 1529.773806] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4232f2cf-d913-4fcb-af1d-2fc86f2252d8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.117925] env[62510]: DEBUG oslo_concurrency.lockutils [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Releasing lock "refresh_cache-58e71d67-aed2-4329-ab60-4dfacff1d0a2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1530.118275] env[62510]: DEBUG nova.compute.manager [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Instance network_info: |[{"id": "6a83554a-ba95-455d-8a32-16fec19f8c40", "address": "fa:16:3e:18:8a:b5", "network": {"id": "925f8c0b-2409-4eca-9a68-c5b357835972", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2008838096-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d98518565b744451ba90ba301267213f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4e52d8a-b086-4333-a5a1-938680a2d2bd", "external-id": "nsx-vlan-transportzone-973", "segmentation_id": 973, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a83554a-ba", "ovs_interfaceid": "6a83554a-ba95-455d-8a32-16fec19f8c40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1530.118705] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:18:8a:b5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e4e52d8a-b086-4333-a5a1-938680a2d2bd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6a83554a-ba95-455d-8a32-16fec19f8c40', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1530.126324] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Creating folder: Project (d98518565b744451ba90ba301267213f). Parent ref: group-v367197. 
{{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1530.126591] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d35f74a2-28cc-4b22-b6c5-a93010eb7b12 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.137307] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Created folder: Project (d98518565b744451ba90ba301267213f) in parent group-v367197. [ 1530.137535] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Creating folder: Instances. Parent ref: group-v367295. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1530.137720] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f0a128a4-7cde-469b-a90e-e1fcd89b0069 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.146277] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Created folder: Instances in parent group-v367295. [ 1530.147043] env[62510]: DEBUG oslo.service.loopingcall [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1530.147043] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1530.147043] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-17ed80ff-1556-49c0-a8ad-d311cf198ea4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.167418] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1530.167418] env[62510]: value = "task-1768555" [ 1530.167418] env[62510]: _type = "Task" [ 1530.167418] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.175414] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768555, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.222501] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.712s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1530.223227] env[62510]: DEBUG nova.compute.manager [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1530.226135] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.813s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1530.227695] env[62510]: INFO nova.compute.claims [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1530.238953] env[62510]: INFO nova.compute.manager [-] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Took 1.54 seconds to deallocate network for instance. [ 1530.283367] env[62510]: DEBUG nova.compute.manager [None req-294dbbf3-95f1-4d8d-87d7-d788f242b26d tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1530.283367] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-294dbbf3-95f1-4d8d-87d7-d788f242b26d tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1530.283367] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7937d175-9019-4f1a-8eaa-39fcdc58c701 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.287781] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-30362c47-ddbb-4616-b105-bbfb04080fb6 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Creating Snapshot of the VM instance {{(pid=62510) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1530.288468] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-73503103-f8bb-480b-94f4-1d17a07b9ed5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.296181] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-294dbbf3-95f1-4d8d-87d7-d788f242b26d tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1530.298084] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-17977801-3cfe-48f9-84bb-c8d73cf42a1f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.300480] env[62510]: DEBUG oslo_vmware.api [None req-30362c47-ddbb-4616-b105-bbfb04080fb6 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Waiting for the task: (returnval){ [ 1530.300480] env[62510]: value = "task-1768556" [ 1530.300480] env[62510]: _type = "Task" [ 1530.300480] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.306878] env[62510]: DEBUG oslo_vmware.api [None req-294dbbf3-95f1-4d8d-87d7-d788f242b26d tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Waiting for the task: (returnval){ [ 1530.306878] env[62510]: value = "task-1768557" [ 1530.306878] env[62510]: _type = "Task" [ 1530.306878] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.314452] env[62510]: DEBUG oslo_vmware.api [None req-30362c47-ddbb-4616-b105-bbfb04080fb6 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768556, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.320230] env[62510]: DEBUG oslo_vmware.api [None req-294dbbf3-95f1-4d8d-87d7-d788f242b26d tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': task-1768557, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.479808] env[62510]: DEBUG oslo_concurrency.lockutils [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Acquiring lock "0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1530.479808] env[62510]: DEBUG oslo_concurrency.lockutils [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Lock "0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1530.616370] env[62510]: DEBUG nova.compute.manager [req-a8b01df5-92d9-43df-b4ff-6ceb48ed593c req-908f4674-0944-44a7-85fe-361a0067cb11 service nova] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Received event network-changed-6a83554a-ba95-455d-8a32-16fec19f8c40 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1530.616482] env[62510]: DEBUG nova.compute.manager [req-a8b01df5-92d9-43df-b4ff-6ceb48ed593c req-908f4674-0944-44a7-85fe-361a0067cb11 service nova] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Refreshing instance network info cache due to event network-changed-6a83554a-ba95-455d-8a32-16fec19f8c40. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1530.616673] env[62510]: DEBUG oslo_concurrency.lockutils [req-a8b01df5-92d9-43df-b4ff-6ceb48ed593c req-908f4674-0944-44a7-85fe-361a0067cb11 service nova] Acquiring lock "refresh_cache-58e71d67-aed2-4329-ab60-4dfacff1d0a2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1530.616815] env[62510]: DEBUG oslo_concurrency.lockutils [req-a8b01df5-92d9-43df-b4ff-6ceb48ed593c req-908f4674-0944-44a7-85fe-361a0067cb11 service nova] Acquired lock "refresh_cache-58e71d67-aed2-4329-ab60-4dfacff1d0a2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1530.616982] env[62510]: DEBUG nova.network.neutron [req-a8b01df5-92d9-43df-b4ff-6ceb48ed593c req-908f4674-0944-44a7-85fe-361a0067cb11 service nova] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Refreshing network info cache for port 6a83554a-ba95-455d-8a32-16fec19f8c40 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1530.678041] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768555, 'name': CreateVM_Task, 'duration_secs': 0.372913} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.678887] env[62510]: DEBUG nova.network.neutron [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Successfully updated port: c2d4507d-2fd6-466d-9025-685dbebc79f3 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1530.679914] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1530.683940] env[62510]: DEBUG oslo_concurrency.lockutils [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1530.683940] env[62510]: DEBUG oslo_concurrency.lockutils [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1530.683940] env[62510]: DEBUG oslo_concurrency.lockutils [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1530.684352] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71191a78-4960-430c-a140-ddf257ebcfe0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.691032] env[62510]: DEBUG oslo_vmware.api [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1530.691032] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]521dbe97-53fe-1c85-9b95-9232505733de" [ 1530.691032] env[62510]: _type = "Task" [ 1530.691032] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.700289] env[62510]: DEBUG oslo_vmware.api [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]521dbe97-53fe-1c85-9b95-9232505733de, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.731973] env[62510]: DEBUG nova.compute.utils [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1530.735250] env[62510]: DEBUG nova.compute.manager [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1530.735418] env[62510]: DEBUG nova.network.neutron [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1530.746712] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ffaedeeb-ed18-4a73-af91-196101df9349 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1530.787381] env[62510]: DEBUG nova.policy [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4cc966cb72fa41108733a0e93d79c410', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3109fa7889c64dfda2117d4cd58aa528', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1530.814336] env[62510]: DEBUG oslo_vmware.api [None req-30362c47-ddbb-4616-b105-bbfb04080fb6 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768556, 'name': CreateSnapshot_Task, 'duration_secs': 0.49744} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.814972] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-30362c47-ddbb-4616-b105-bbfb04080fb6 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Created Snapshot of the VM instance {{(pid=62510) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1530.815931] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac07b950-f4b6-43ad-93c8-eb7201aecf58 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.821164] env[62510]: DEBUG oslo_vmware.api [None req-294dbbf3-95f1-4d8d-87d7-d788f242b26d tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': task-1768557, 'name': PowerOffVM_Task, 'duration_secs': 0.224439} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.821751] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-294dbbf3-95f1-4d8d-87d7-d788f242b26d tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1530.821878] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-294dbbf3-95f1-4d8d-87d7-d788f242b26d tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1530.822120] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7a76dac8-4795-4fab-8545-5afb00bc8dae {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.893826] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-294dbbf3-95f1-4d8d-87d7-d788f242b26d tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1530.894058] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-294dbbf3-95f1-4d8d-87d7-d788f242b26d tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1530.894247] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-294dbbf3-95f1-4d8d-87d7-d788f242b26d tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Deleting the datastore file [datastore1] cfe53f9c-d78b-4af7-b991-f3549c03f22d {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1530.894528] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9c5c52c6-50c5-400e-8330-32900f0ab38e 
{{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.901496] env[62510]: DEBUG oslo_vmware.api [None req-294dbbf3-95f1-4d8d-87d7-d788f242b26d tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Waiting for the task: (returnval){ [ 1530.901496] env[62510]: value = "task-1768559" [ 1530.901496] env[62510]: _type = "Task" [ 1530.901496] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.910831] env[62510]: DEBUG oslo_vmware.api [None req-294dbbf3-95f1-4d8d-87d7-d788f242b26d tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': task-1768559, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.183653] env[62510]: DEBUG oslo_concurrency.lockutils [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Acquiring lock "refresh_cache-3533a113-6f46-4b18-872d-9bc1b0481969" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1531.183826] env[62510]: DEBUG oslo_concurrency.lockutils [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Acquired lock "refresh_cache-3533a113-6f46-4b18-872d-9bc1b0481969" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1531.183958] env[62510]: DEBUG nova.network.neutron [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1531.205421] env[62510]: DEBUG oslo_vmware.api [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]521dbe97-53fe-1c85-9b95-9232505733de, 'name': SearchDatastore_Task, 'duration_secs': 0.009663} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.205935] env[62510]: DEBUG oslo_concurrency.lockutils [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1531.206276] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1531.206570] env[62510]: DEBUG oslo_concurrency.lockutils [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1531.206780] env[62510]: DEBUG oslo_concurrency.lockutils [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1531.207029] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1531.207594] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e4568afb-c30a-4e00-a9d7-ff1419a75b92 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.216698] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1531.216905] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1531.217646] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d820b625-6913-4a4a-aa49-2a3f13776b85 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.223216] env[62510]: DEBUG oslo_vmware.api [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1531.223216] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52ccb456-9641-fe08-910a-7a6cc0ba70db" [ 1531.223216] env[62510]: _type = "Task" [ 1531.223216] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.231724] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e0016383-d5da-4037-b2a8-44b32ec35609 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Acquiring lock "2d2ab209-8072-4e64-8170-50d96d71bc54" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1531.232025] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e0016383-d5da-4037-b2a8-44b32ec35609 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Lock "2d2ab209-8072-4e64-8170-50d96d71bc54" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1531.232304] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e0016383-d5da-4037-b2a8-44b32ec35609 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Acquiring lock "2d2ab209-8072-4e64-8170-50d96d71bc54-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1531.232545] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e0016383-d5da-4037-b2a8-44b32ec35609 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Lock "2d2ab209-8072-4e64-8170-50d96d71bc54-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1531.232796] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e0016383-d5da-4037-b2a8-44b32ec35609 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Lock "2d2ab209-8072-4e64-8170-50d96d71bc54-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1531.234340] env[62510]: DEBUG oslo_vmware.api [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52ccb456-9641-fe08-910a-7a6cc0ba70db, 'name': 
SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.234856] env[62510]: INFO nova.compute.manager [None req-e0016383-d5da-4037-b2a8-44b32ec35609 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Terminating instance [ 1531.237656] env[62510]: DEBUG nova.compute.manager [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1531.337527] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-30362c47-ddbb-4616-b105-bbfb04080fb6 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Creating linked-clone VM from snapshot {{(pid=62510) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1531.340213] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-53ce21e9-7789-4f02-a2b4-fd86821066bf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.350108] env[62510]: DEBUG oslo_vmware.api [None req-30362c47-ddbb-4616-b105-bbfb04080fb6 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Waiting for the task: (returnval){ [ 1531.350108] env[62510]: value = "task-1768560" [ 1531.350108] env[62510]: _type = "Task" [ 1531.350108] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.357954] env[62510]: DEBUG oslo_vmware.api [None req-30362c47-ddbb-4616-b105-bbfb04080fb6 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768560, 'name': CloneVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.411335] env[62510]: DEBUG oslo_vmware.api [None req-294dbbf3-95f1-4d8d-87d7-d788f242b26d tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': task-1768559, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.12729} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.411598] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-294dbbf3-95f1-4d8d-87d7-d788f242b26d tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1531.411786] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-294dbbf3-95f1-4d8d-87d7-d788f242b26d tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1531.411967] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-294dbbf3-95f1-4d8d-87d7-d788f242b26d tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1531.412164] env[62510]: INFO nova.compute.manager [None req-294dbbf3-95f1-4d8d-87d7-d788f242b26d tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1531.412410] env[62510]: DEBUG oslo.service.loopingcall [None req-294dbbf3-95f1-4d8d-87d7-d788f242b26d tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1531.412603] env[62510]: DEBUG nova.compute.manager [-] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1531.412701] env[62510]: DEBUG nova.network.neutron [-] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1531.678494] env[62510]: DEBUG nova.network.neutron [req-a8b01df5-92d9-43df-b4ff-6ceb48ed593c req-908f4674-0944-44a7-85fe-361a0067cb11 service nova] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Updated VIF entry in instance network info cache for port 6a83554a-ba95-455d-8a32-16fec19f8c40. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1531.679158] env[62510]: DEBUG nova.network.neutron [req-a8b01df5-92d9-43df-b4ff-6ceb48ed593c req-908f4674-0944-44a7-85fe-361a0067cb11 service nova] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Updating instance_info_cache with network_info: [{"id": "6a83554a-ba95-455d-8a32-16fec19f8c40", "address": "fa:16:3e:18:8a:b5", "network": {"id": "925f8c0b-2409-4eca-9a68-c5b357835972", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2008838096-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d98518565b744451ba90ba301267213f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4e52d8a-b086-4333-a5a1-938680a2d2bd", "external-id": "nsx-vlan-transportzone-973", "segmentation_id": 973, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a83554a-ba", "ovs_interfaceid": "6a83554a-ba95-455d-8a32-16fec19f8c40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1531.736498] env[62510]: DEBUG oslo_vmware.api [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52ccb456-9641-fe08-910a-7a6cc0ba70db, 'name': SearchDatastore_Task, 'duration_secs': 0.008038} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.740616] env[62510]: DEBUG nova.compute.manager [None req-e0016383-d5da-4037-b2a8-44b32ec35609 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1531.741020] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e0016383-d5da-4037-b2a8-44b32ec35609 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1531.741713] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14674afb-d112-432f-985d-200694d9f044 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.744766] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-605e3663-2663-4600-9338-41f6a62fb299 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.750449] env[62510]: DEBUG nova.network.neutron [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1531.759030] env[62510]: DEBUG nova.network.neutron [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Successfully created port: d6ee81d1-3abc-4d5e-a8ca-658407cbd553 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1531.769152] env[62510]: DEBUG oslo_vmware.api [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1531.769152] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52f2da99-1786-d416-b325-01bafdd91029" [ 1531.769152] env[62510]: _type = "Task" [ 1531.769152] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.770306] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e0016383-d5da-4037-b2a8-44b32ec35609 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1531.774367] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4efdfe04-565d-4adc-a028-5f38be7034a5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.782770] env[62510]: DEBUG oslo_vmware.api [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52f2da99-1786-d416-b325-01bafdd91029, 'name': SearchDatastore_Task, 'duration_secs': 0.011305} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.785465] env[62510]: DEBUG oslo_concurrency.lockutils [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1531.785868] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 58e71d67-aed2-4329-ab60-4dfacff1d0a2/58e71d67-aed2-4329-ab60-4dfacff1d0a2.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1531.786472] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-efb96393-8181-4b1e-84bf-56721c29d606 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.794225] env[62510]: DEBUG oslo_vmware.api [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1531.794225] env[62510]: value = "task-1768562" [ 1531.794225] env[62510]: _type = "Task" [ 1531.794225] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.806785] env[62510]: DEBUG oslo_vmware.api [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768562, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.865934] env[62510]: DEBUG oslo_vmware.api [None req-30362c47-ddbb-4616-b105-bbfb04080fb6 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768560, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.875766] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e0016383-d5da-4037-b2a8-44b32ec35609 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1531.876271] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e0016383-d5da-4037-b2a8-44b32ec35609 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1531.876619] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0016383-d5da-4037-b2a8-44b32ec35609 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Deleting the datastore file [datastore1] 2d2ab209-8072-4e64-8170-50d96d71bc54 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1531.877026] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8d046a93-f2e2-47eb-91f5-80f02164efac {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.888020] env[62510]: DEBUG oslo_vmware.api [None req-e0016383-d5da-4037-b2a8-44b32ec35609 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Waiting for the task: (returnval){ [ 1531.888020] env[62510]: value = "task-1768563" [ 1531.888020] env[62510]: _type = "Task" [ 1531.888020] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.893262] env[62510]: DEBUG oslo_vmware.api [None req-e0016383-d5da-4037-b2a8-44b32ec35609 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': task-1768563, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.954386] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee8357f4-769d-44bb-a229-dc6a31620ab7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.962329] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e455e9cc-6691-49ec-b39f-840767a26898 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.999021] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4974c7d2-5caa-41cf-b013-20298a0859dd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.004404] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b8a3ab-ca5e-49a3-9dee-bea06819bda1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.026687] env[62510]: DEBUG nova.compute.provider_tree [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1532.177183] env[62510]: DEBUG nova.network.neutron [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Updating instance_info_cache with network_info: [{"id": "c2d4507d-2fd6-466d-9025-685dbebc79f3", "address": "fa:16:3e:d4:ed:51", "network": {"id": "37039935-bfed-4317-b0bb-aa8df5f89dbd", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1046853355-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc4a16c9d5d346489a9c8efec041df23", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e66c4ebe-f808-4b34-bdb5-6c45edb1736f", "external-id": "cl2-zone-719", "segmentation_id": 719, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2d4507d-2f", "ovs_interfaceid": "c2d4507d-2fd6-466d-9025-685dbebc79f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1532.188277] env[62510]: DEBUG oslo_concurrency.lockutils [req-a8b01df5-92d9-43df-b4ff-6ceb48ed593c req-908f4674-0944-44a7-85fe-361a0067cb11 service nova] Releasing lock "refresh_cache-58e71d67-aed2-4329-ab60-4dfacff1d0a2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1532.188277] env[62510]: DEBUG nova.compute.manager 
[req-a8b01df5-92d9-43df-b4ff-6ceb48ed593c req-908f4674-0944-44a7-85fe-361a0067cb11 service nova] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Received event network-vif-deleted-7a3daaec-85e6-418b-a6c1-a74dcb3b41ac {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1532.188277] env[62510]: DEBUG nova.compute.manager [req-a8b01df5-92d9-43df-b4ff-6ceb48ed593c req-908f4674-0944-44a7-85fe-361a0067cb11 service nova] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Received event network-vif-plugged-c2d4507d-2fd6-466d-9025-685dbebc79f3 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1532.188277] env[62510]: DEBUG oslo_concurrency.lockutils [req-a8b01df5-92d9-43df-b4ff-6ceb48ed593c req-908f4674-0944-44a7-85fe-361a0067cb11 service nova] Acquiring lock "3533a113-6f46-4b18-872d-9bc1b0481969-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1532.188277] env[62510]: DEBUG oslo_concurrency.lockutils [req-a8b01df5-92d9-43df-b4ff-6ceb48ed593c req-908f4674-0944-44a7-85fe-361a0067cb11 service nova] Lock "3533a113-6f46-4b18-872d-9bc1b0481969-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1532.188277] env[62510]: DEBUG oslo_concurrency.lockutils [req-a8b01df5-92d9-43df-b4ff-6ceb48ed593c req-908f4674-0944-44a7-85fe-361a0067cb11 service nova] Lock "3533a113-6f46-4b18-872d-9bc1b0481969-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1532.188277] env[62510]: DEBUG nova.compute.manager [req-a8b01df5-92d9-43df-b4ff-6ceb48ed593c req-908f4674-0944-44a7-85fe-361a0067cb11 service nova] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] No waiting events found dispatching network-vif-plugged-c2d4507d-2fd6-466d-9025-685dbebc79f3 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1532.188277] env[62510]: WARNING nova.compute.manager [req-a8b01df5-92d9-43df-b4ff-6ceb48ed593c req-908f4674-0944-44a7-85fe-361a0067cb11 service nova] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Received unexpected event network-vif-plugged-c2d4507d-2fd6-466d-9025-685dbebc79f3 for instance with vm_state building and task_state spawning. [ 1532.267735] env[62510]: DEBUG nova.compute.manager [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1532.308473] env[62510]: DEBUG oslo_vmware.api [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768562, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.314643] env[62510]: DEBUG nova.virt.hardware [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1532.314643] env[62510]: DEBUG nova.virt.hardware [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1532.314643] env[62510]: DEBUG nova.virt.hardware [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1532.314643] env[62510]: DEBUG nova.virt.hardware [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1532.314643] env[62510]: DEBUG nova.virt.hardware [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1532.315694] env[62510]: DEBUG nova.virt.hardware [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1532.316126] env[62510]: DEBUG nova.virt.hardware [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1532.316444] env[62510]: DEBUG nova.virt.hardware [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1532.316722] env[62510]: DEBUG nova.virt.hardware [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1532.317064] env[62510]: DEBUG nova.virt.hardware [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1532.317409] env[62510]: DEBUG nova.virt.hardware [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1532.318837] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec82c0c6-8ecd-4e48-9c1f-85c65f15121d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.327120] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-840aa4f0-bfe4-4f17-9034-01a645f94add {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.360229] env[62510]: DEBUG oslo_vmware.api [None req-30362c47-ddbb-4616-b105-bbfb04080fb6 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768560, 'name': CloneVM_Task} progress is 94%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.395319] env[62510]: DEBUG oslo_vmware.api [None req-e0016383-d5da-4037-b2a8-44b32ec35609 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': task-1768563, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.473327} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.395319] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0016383-d5da-4037-b2a8-44b32ec35609 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1532.395319] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e0016383-d5da-4037-b2a8-44b32ec35609 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1532.395602] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e0016383-d5da-4037-b2a8-44b32ec35609 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1532.395932] env[62510]: INFO nova.compute.manager [None req-e0016383-d5da-4037-b2a8-44b32ec35609 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Took 0.65 seconds to destroy the instance on the hypervisor. [ 1532.396316] env[62510]: DEBUG oslo.service.loopingcall [None req-e0016383-d5da-4037-b2a8-44b32ec35609 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1532.396626] env[62510]: DEBUG nova.compute.manager [-] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1532.396836] env[62510]: DEBUG nova.network.neutron [-] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1532.494221] env[62510]: DEBUG nova.compute.manager [req-65b3010c-66b0-4260-acca-113932667858 req-a5ed3000-0710-40ea-9ca5-de67a389d0b3 service nova] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Received event network-vif-deleted-1e000a52-1d14-40cd-a33b-51dd1c0196b5 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1532.494418] env[62510]: INFO nova.compute.manager [req-65b3010c-66b0-4260-acca-113932667858 req-a5ed3000-0710-40ea-9ca5-de67a389d0b3 service nova] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Neutron deleted interface 1e000a52-1d14-40cd-a33b-51dd1c0196b5; detaching it from the instance and deleting it from the info cache [ 1532.494587] env[62510]: DEBUG nova.network.neutron [req-65b3010c-66b0-4260-acca-113932667858 req-a5ed3000-0710-40ea-9ca5-de67a389d0b3 service nova] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1532.530232] env[62510]: DEBUG nova.scheduler.client.report [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1532.682329] env[62510]: DEBUG oslo_concurrency.lockutils [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Releasing lock "refresh_cache-3533a113-6f46-4b18-872d-9bc1b0481969" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1532.682779] env[62510]: DEBUG nova.compute.manager [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Instance network_info: |[{"id": "c2d4507d-2fd6-466d-9025-685dbebc79f3", "address": "fa:16:3e:d4:ed:51", "network": {"id": "37039935-bfed-4317-b0bb-aa8df5f89dbd", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1046853355-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], 
"meta": {"injected": false, "tenant_id": "fc4a16c9d5d346489a9c8efec041df23", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e66c4ebe-f808-4b34-bdb5-6c45edb1736f", "external-id": "cl2-zone-719", "segmentation_id": 719, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2d4507d-2f", "ovs_interfaceid": "c2d4507d-2fd6-466d-9025-685dbebc79f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1532.683682] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d4:ed:51', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e66c4ebe-f808-4b34-bdb5-6c45edb1736f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c2d4507d-2fd6-466d-9025-685dbebc79f3', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1532.691782] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Creating folder: Project (fc4a16c9d5d346489a9c8efec041df23). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1532.692715] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fb5c84f5-a543-455e-a261-17213a8c8364 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.704446] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Created folder: Project (fc4a16c9d5d346489a9c8efec041df23) in parent group-v367197. [ 1532.704651] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Creating folder: Instances. Parent ref: group-v367300. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1532.704902] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-236a7c1f-c073-4d7c-95e9-05626f6ad9f2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.714370] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Created folder: Instances in parent group-v367300. [ 1532.714624] env[62510]: DEBUG oslo.service.loopingcall [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1532.714812] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1532.715068] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7ec2b881-931b-4be7-9a00-de7ca084c7a3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.731663] env[62510]: DEBUG nova.compute.manager [req-f8f7780b-c216-4743-821c-d6cad4f6157e req-9111601b-92ee-478f-b59e-5c256f084d05 service nova] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Received event network-changed-c2d4507d-2fd6-466d-9025-685dbebc79f3 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1532.731851] env[62510]: DEBUG nova.compute.manager [req-f8f7780b-c216-4743-821c-d6cad4f6157e req-9111601b-92ee-478f-b59e-5c256f084d05 service nova] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Refreshing instance network info cache due to event network-changed-c2d4507d-2fd6-466d-9025-685dbebc79f3. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1532.732077] env[62510]: DEBUG oslo_concurrency.lockutils [req-f8f7780b-c216-4743-821c-d6cad4f6157e req-9111601b-92ee-478f-b59e-5c256f084d05 service nova] Acquiring lock "refresh_cache-3533a113-6f46-4b18-872d-9bc1b0481969" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1532.732219] env[62510]: DEBUG oslo_concurrency.lockutils [req-f8f7780b-c216-4743-821c-d6cad4f6157e req-9111601b-92ee-478f-b59e-5c256f084d05 service nova] Acquired lock "refresh_cache-3533a113-6f46-4b18-872d-9bc1b0481969" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1532.732379] env[62510]: DEBUG nova.network.neutron [req-f8f7780b-c216-4743-821c-d6cad4f6157e req-9111601b-92ee-478f-b59e-5c256f084d05 service nova] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Refreshing network info cache for port c2d4507d-2fd6-466d-9025-685dbebc79f3 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1532.738144] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1532.738144] env[62510]: value = "task-1768566" [ 1532.738144] env[62510]: _type = "Task" [ 1532.738144] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.746908] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768566, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.771251] env[62510]: DEBUG nova.network.neutron [-] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1532.807428] env[62510]: DEBUG oslo_vmware.api [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768562, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.519814} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.807428] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 58e71d67-aed2-4329-ab60-4dfacff1d0a2/58e71d67-aed2-4329-ab60-4dfacff1d0a2.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1532.807428] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1532.807428] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3100b567-897e-4de5-be79-3053f015804c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.813563] env[62510]: DEBUG oslo_vmware.api [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1532.813563] env[62510]: value = "task-1768567" [ 1532.813563] env[62510]: _type = "Task" [ 1532.813563] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.824883] env[62510]: DEBUG oslo_vmware.api [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768567, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.861765] env[62510]: DEBUG oslo_vmware.api [None req-30362c47-ddbb-4616-b105-bbfb04080fb6 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768560, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.001523] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-833918ae-72f8-412f-946b-0f048c639b20 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.013503] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0956783-3c22-4452-87bf-7992bcbbcb70 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.047263] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.820s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1533.047263] env[62510]: DEBUG nova.compute.manager [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1533.050669] env[62510]: DEBUG nova.compute.manager [req-65b3010c-66b0-4260-acca-113932667858 req-a5ed3000-0710-40ea-9ca5-de67a389d0b3 service nova] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Detach interface failed, port_id=1e000a52-1d14-40cd-a33b-51dd1c0196b5, reason: Instance cfe53f9c-d78b-4af7-b991-f3549c03f22d could not be found. 
{{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1533.051301] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7ad8f086-1223-4f78-8e94-748a6fcda1f6 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.588s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1533.051481] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7ad8f086-1223-4f78-8e94-748a6fcda1f6 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1533.053441] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e84501df-7708-4293-a107-8524f0fde6b2 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.232s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1533.053627] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e84501df-7708-4293-a107-8524f0fde6b2 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1533.055345] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0e3b24cb-60c3-474f-8b67-e42566499bf1 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.448s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1533.055525] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0e3b24cb-60c3-474f-8b67-e42566499bf1 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1533.057086] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6b4f784c-aa5c-4d58-a36e-7b0bfe953359 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.051s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1533.057277] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6b4f784c-aa5c-4d58-a36e-7b0bfe953359 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62510) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1533.058807] env[62510]: DEBUG oslo_concurrency.lockutils [None req-fc5070c4-52fc-4c51-9438-de5c343b920f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.013s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1533.058990] env[62510]: DEBUG oslo_concurrency.lockutils [None req-fc5070c4-52fc-4c51-9438-de5c343b920f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1533.060530] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.325s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1533.061984] env[62510]: INFO nova.compute.claims [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1533.085330] env[62510]: INFO nova.scheduler.client.report [None req-e84501df-7708-4293-a107-8524f0fde6b2 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Deleted allocations for instance 26b283b0-98b4-4a15-abe0-fbf97e1f49eb [ 1533.088080] env[62510]: INFO nova.scheduler.client.report [None req-0e3b24cb-60c3-474f-8b67-e42566499bf1 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Deleted allocations for instance a040671e-941d-4406-81af-f2f7a4b690e4 [ 1533.103176] env[62510]: INFO nova.scheduler.client.report [None req-fc5070c4-52fc-4c51-9438-de5c343b920f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Deleted allocations for instance 75e06a24-b96c-4a42-bc2d-b0b960e3301a [ 1533.104414] env[62510]: INFO nova.scheduler.client.report [None req-6b4f784c-aa5c-4d58-a36e-7b0bfe953359 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Deleted allocations for instance aca56820-5a06-43dd-9d98-25421f7ef6a6 [ 1533.120278] env[62510]: INFO nova.scheduler.client.report [None req-7ad8f086-1223-4f78-8e94-748a6fcda1f6 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Deleted allocations for instance 9a1a0428-8ccd-4614-8853-ef3eeec23d55 [ 1533.180865] env[62510]: DEBUG nova.network.neutron [-] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1533.247900] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768566, 'name': CreateVM_Task, 'duration_secs': 0.371309} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.248139] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1533.248822] env[62510]: DEBUG oslo_concurrency.lockutils [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1533.249310] env[62510]: DEBUG oslo_concurrency.lockutils [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1533.249310] env[62510]: DEBUG oslo_concurrency.lockutils [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1533.249571] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a597347-95a1-4d89-89e9-33ddbdf0d46e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.254515] env[62510]: DEBUG oslo_vmware.api [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Waiting for the task: (returnval){ [ 1533.254515] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52544e39-63b7-a754-5a27-7391e6ae8656" [ 1533.254515] env[62510]: _type = "Task" [ 1533.254515] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.261853] env[62510]: DEBUG oslo_vmware.api [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52544e39-63b7-a754-5a27-7391e6ae8656, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.274652] env[62510]: INFO nova.compute.manager [-] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Took 1.86 seconds to deallocate network for instance. [ 1533.326829] env[62510]: DEBUG oslo_vmware.api [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768567, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084537} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.327142] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1533.328036] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71138a8a-6c6e-4659-8fed-df796850fd8d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.350180] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] 58e71d67-aed2-4329-ab60-4dfacff1d0a2/58e71d67-aed2-4329-ab60-4dfacff1d0a2.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1533.352851] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e0f6135a-a634-410d-80fb-e75b18530de3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.375740] env[62510]: DEBUG oslo_vmware.api [None req-30362c47-ddbb-4616-b105-bbfb04080fb6 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768560, 'name': CloneVM_Task, 'duration_secs': 1.891996} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.377120] env[62510]: INFO nova.virt.vmwareapi.vmops [None req-30362c47-ddbb-4616-b105-bbfb04080fb6 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Created linked-clone VM from snapshot [ 1533.377514] env[62510]: DEBUG oslo_vmware.api [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1533.377514] env[62510]: value = "task-1768568" [ 1533.377514] env[62510]: _type = "Task" [ 1533.377514] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.378413] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a20c0f3c-7de6-4790-91c0-c51b2abef312 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.391857] env[62510]: DEBUG oslo_vmware.api [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768568, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.392145] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-30362c47-ddbb-4616-b105-bbfb04080fb6 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Uploading image 6b6ee3c5-953e-431d-bb84-94a3771138f0 {{(pid=62510) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1533.417888] env[62510]: DEBUG oslo_vmware.rw_handles [None req-30362c47-ddbb-4616-b105-bbfb04080fb6 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1533.417888] env[62510]: value = "vm-367299" [ 1533.417888] env[62510]: _type = "VirtualMachine" [ 1533.417888] env[62510]: }. {{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1533.418186] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-27e57a10-affc-4dbf-9d3c-7064506ce14a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.424084] env[62510]: DEBUG oslo_vmware.rw_handles [None req-30362c47-ddbb-4616-b105-bbfb04080fb6 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Lease: (returnval){ [ 1533.424084] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]521b4a3f-e31b-8b2f-404f-0dd68898c201" [ 1533.424084] env[62510]: _type = "HttpNfcLease" [ 1533.424084] env[62510]: } obtained for exporting VM: (result){ [ 1533.424084] env[62510]: value = "vm-367299" [ 1533.424084] env[62510]: _type = "VirtualMachine" [ 1533.424084] env[62510]: }. {{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1533.424343] env[62510]: DEBUG oslo_vmware.api [None req-30362c47-ddbb-4616-b105-bbfb04080fb6 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Waiting for the lease: (returnval){ [ 1533.424343] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]521b4a3f-e31b-8b2f-404f-0dd68898c201" [ 1533.424343] env[62510]: _type = "HttpNfcLease" [ 1533.424343] env[62510]: } to be ready. {{(pid=62510) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1533.430781] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1533.430781] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]521b4a3f-e31b-8b2f-404f-0dd68898c201" [ 1533.430781] env[62510]: _type = "HttpNfcLease" [ 1533.430781] env[62510]: } is initializing. {{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1533.473994] env[62510]: DEBUG nova.network.neutron [req-f8f7780b-c216-4743-821c-d6cad4f6157e req-9111601b-92ee-478f-b59e-5c256f084d05 service nova] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Updated VIF entry in instance network info cache for port c2d4507d-2fd6-466d-9025-685dbebc79f3. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1533.474946] env[62510]: DEBUG nova.network.neutron [req-f8f7780b-c216-4743-821c-d6cad4f6157e req-9111601b-92ee-478f-b59e-5c256f084d05 service nova] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Updating instance_info_cache with network_info: [{"id": "c2d4507d-2fd6-466d-9025-685dbebc79f3", "address": "fa:16:3e:d4:ed:51", "network": {"id": "37039935-bfed-4317-b0bb-aa8df5f89dbd", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1046853355-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc4a16c9d5d346489a9c8efec041df23", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e66c4ebe-f808-4b34-bdb5-6c45edb1736f", "external-id": "cl2-zone-719", "segmentation_id": 719, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2d4507d-2f", "ovs_interfaceid": "c2d4507d-2fd6-466d-9025-685dbebc79f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1533.553821] env[62510]: DEBUG nova.compute.utils [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1533.555149] env[62510]: DEBUG nova.compute.manager [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Allocating IP information in the background. 
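
The network_info blob written to the instance cache above is a list of VIF dictionaries: port UUID, MAC address, the Neutron network with its subnets and fixed IPs, and the binding details (OVS, NSX logical switch, segmentation ID). A small sketch of reading such an entry; the field names and values are copied from the logged entry, abbreviated to the parts used here:

vif = {
    "id": "c2d4507d-2fd6-466d-9025-685dbebc79f3",
    "address": "fa:16:3e:d4:ed:51",
    "type": "ovs",
    "devname": "tapc2d4507d-2f",
    "network": {
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "gateway": {"address": "192.168.128.1"},
            "ips": [{"address": "192.168.128.13", "type": "fixed"}],
        }],
    },
}

fixed_ips = [
    ip["address"]
    for subnet in vif["network"]["subnets"]
    for ip in subnet["ips"]
    if ip["type"] == "fixed"
]
print(vif["id"], vif["address"], fixed_ips)   # port UUID, MAC, ['192.168.128.13']
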
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1533.555319] env[62510]: DEBUG nova.network.neutron [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1533.597622] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e84501df-7708-4293-a107-8524f0fde6b2 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Lock "26b283b0-98b4-4a15-abe0-fbf97e1f49eb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.464s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1533.601164] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0e3b24cb-60c3-474f-8b67-e42566499bf1 tempest-ServerDiagnosticsNegativeTest-564884613 tempest-ServerDiagnosticsNegativeTest-564884613-project-member] Lock "a040671e-941d-4406-81af-f2f7a4b690e4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.659s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1533.614556] env[62510]: DEBUG oslo_concurrency.lockutils [None req-fc5070c4-52fc-4c51-9438-de5c343b920f tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Lock "75e06a24-b96c-4a42-bc2d-b0b960e3301a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.403s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1533.624497] env[62510]: DEBUG nova.policy [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd27cdf97844a4d8384ff42ce1f148e3b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7658f50bda794df68c1e82f4978d787b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1533.627417] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6b4f784c-aa5c-4d58-a36e-7b0bfe953359 tempest-TenantUsagesTestJSON-1682320939 tempest-TenantUsagesTestJSON-1682320939-project-member] Lock "aca56820-5a06-43dd-9d98-25421f7ef6a6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.113s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1533.629893] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7ad8f086-1223-4f78-8e94-748a6fcda1f6 tempest-ListServersNegativeTestJSON-1936654145 tempest-ListServersNegativeTestJSON-1936654145-project-member] Lock "9a1a0428-8ccd-4614-8853-ef3eeec23d55" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.722s {{(pid=62510) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1533.684444] env[62510]: INFO nova.compute.manager [-] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Took 1.29 seconds to deallocate network for instance. [ 1533.766296] env[62510]: DEBUG oslo_vmware.api [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52544e39-63b7-a754-5a27-7391e6ae8656, 'name': SearchDatastore_Task, 'duration_secs': 0.009359} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.766740] env[62510]: DEBUG oslo_concurrency.lockutils [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1533.767090] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1533.767486] env[62510]: DEBUG oslo_concurrency.lockutils [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1533.767750] env[62510]: DEBUG oslo_concurrency.lockutils [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1533.768051] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1533.768401] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b407885f-2603-41b1-9092-1c66e6604fcf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.776926] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1533.777270] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 
tempest-VolumesAdminNegativeTest-1464576921-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1533.778109] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6daca3c6-4e27-4c67-b1c5-1efa4d5a6e57 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.781554] env[62510]: DEBUG oslo_concurrency.lockutils [None req-294dbbf3-95f1-4d8d-87d7-d788f242b26d tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1533.787067] env[62510]: DEBUG oslo_vmware.api [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Waiting for the task: (returnval){ [ 1533.787067] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5211f207-4dbb-4a6a-6237-158a00c0fac6" [ 1533.787067] env[62510]: _type = "Task" [ 1533.787067] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.793933] env[62510]: DEBUG oslo_vmware.api [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5211f207-4dbb-4a6a-6237-158a00c0fac6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.833563] env[62510]: DEBUG nova.network.neutron [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Successfully updated port: d6ee81d1-3abc-4d5e-a8ca-658407cbd553 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1533.892012] env[62510]: DEBUG oslo_vmware.api [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768568, 'name': ReconfigVM_Task, 'duration_secs': 0.310864} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.892544] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Reconfigured VM instance instance-00000025 to attach disk [datastore1] 58e71d67-aed2-4329-ab60-4dfacff1d0a2/58e71d67-aed2-4329-ab60-4dfacff1d0a2.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1533.893292] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e26fbd82-eae1-4ce5-b240-eea37915087d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.900213] env[62510]: DEBUG oslo_vmware.api [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1533.900213] env[62510]: value = "task-1768570" [ 1533.900213] env[62510]: _type = "Task" [ 1533.900213] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.914746] env[62510]: DEBUG oslo_vmware.api [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768570, 'name': Rename_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.933873] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1533.933873] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]521b4a3f-e31b-8b2f-404f-0dd68898c201" [ 1533.933873] env[62510]: _type = "HttpNfcLease" [ 1533.933873] env[62510]: } is ready. {{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1533.934135] env[62510]: DEBUG oslo_vmware.rw_handles [None req-30362c47-ddbb-4616-b105-bbfb04080fb6 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1533.934135] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]521b4a3f-e31b-8b2f-404f-0dd68898c201" [ 1533.934135] env[62510]: _type = "HttpNfcLease" [ 1533.934135] env[62510]: }. {{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1533.936639] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f623d1e-b033-46c5-97ab-f398746fd3e8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.943992] env[62510]: DEBUG oslo_vmware.rw_handles [None req-30362c47-ddbb-4616-b105-bbfb04080fb6 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525f242d-9e9f-fc25-268f-6d603a755453/disk-0.vmdk from lease info. 
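
The snapshot upload above follows the stream-optimized export flow: ExportVm returns an HttpNfcLease, the lease is polled until it leaves "initializing" and becomes "ready", and the lease info then yields the VMDK URL that is opened for reading. A rough sketch of the polling step only; get_lease_state here is a caller-supplied stand-in for the oslo.vmware property read, not a real API:

import time

def wait_for_lease_ready(get_lease_state, poll_interval=0.5, timeout=60):
    # get_lease_state: callable returning "initializing", "ready" or "error".
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state = get_lease_state()
        if state == "ready":
            return
        if state == "error":
            raise RuntimeError("HttpNfcLease entered the error state")
        time.sleep(poll_interval)
    raise TimeoutError("HttpNfcLease never became ready")

# Dummy lease that is "initializing" on the first poll and "ready" on the second;
# once ready, the lease info carries the device URL that gets opened for reading
# (the https://<esx-host>/nfc/<ticket>/disk-0.vmdk URL in the log above).
states = iter(["initializing", "ready"])
wait_for_lease_ready(lambda: next(states), poll_interval=0)
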
{{(pid=62510) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1533.944215] env[62510]: DEBUG oslo_vmware.rw_handles [None req-30362c47-ddbb-4616-b105-bbfb04080fb6 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525f242d-9e9f-fc25-268f-6d603a755453/disk-0.vmdk for reading. {{(pid=62510) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1534.011071] env[62510]: DEBUG oslo_concurrency.lockutils [req-f8f7780b-c216-4743-821c-d6cad4f6157e req-9111601b-92ee-478f-b59e-5c256f084d05 service nova] Releasing lock "refresh_cache-3533a113-6f46-4b18-872d-9bc1b0481969" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1534.027788] env[62510]: DEBUG nova.network.neutron [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Successfully created port: 9d4d0ed7-cab1-4f7d-9eda-faa60a248129 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1534.057284] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-76335de8-40b8-4f78-a066-fcbd1e988807 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.064420] env[62510]: DEBUG nova.compute.manager [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1534.192948] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e0016383-d5da-4037-b2a8-44b32ec35609 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1534.306080] env[62510]: DEBUG oslo_vmware.api [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5211f207-4dbb-4a6a-6237-158a00c0fac6, 'name': SearchDatastore_Task, 'duration_secs': 0.008996} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.314248] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e68291a2-c8e8-4363-8aca-07225dd56473 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.322343] env[62510]: DEBUG oslo_vmware.api [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Waiting for the task: (returnval){ [ 1534.322343] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52b34eca-8760-dc89-0df3-970dcbe58365" [ 1534.322343] env[62510]: _type = "Task" [ 1534.322343] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.337512] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquiring lock "refresh_cache-83fa0d32-18ee-401d-af0b-a0adb538e5f4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1534.337675] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquired lock "refresh_cache-83fa0d32-18ee-401d-af0b-a0adb538e5f4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1534.337847] env[62510]: DEBUG nova.network.neutron [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1534.339328] env[62510]: DEBUG oslo_vmware.api [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52b34eca-8760-dc89-0df3-970dcbe58365, 'name': SearchDatastore_Task, 'duration_secs': 0.011068} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.340328] env[62510]: DEBUG oslo_concurrency.lockutils [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1534.341095] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 3533a113-6f46-4b18-872d-9bc1b0481969/3533a113-6f46-4b18-872d-9bc1b0481969.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1534.341095] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0b2b9227-4db9-4eaa-8c3c-c414611e5f0f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.353612] env[62510]: DEBUG oslo_vmware.api [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Waiting for the task: (returnval){ [ 1534.353612] env[62510]: value = "task-1768571" [ 1534.353612] env[62510]: _type = "Task" [ 1534.353612] env[62510]: } to complete. 
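
The CopyVirtualDisk_Task above copies the cached base image into the instance's own folder on the same datastore (the root disk is then extended to the flavor size, as the ExtendVirtualDisk_Task entries show). A trivial sketch of how the two datastore paths in the log are composed, with the IDs taken from the log itself:

# IDs copied from the log entries above.
image_id = "645af513-c243-4722-b631-714f21477ae6"
instance_uuid = "3533a113-6f46-4b18-872d-9bc1b0481969"
datastore = "datastore1"

cache_vmdk = (f"[{datastore}] devstack-image-cache_base/"
              f"{image_id}/{image_id}.vmdk")
instance_vmdk = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"

print(cache_vmdk)     # source: shared, cached base image
print(instance_vmdk)  # destination: per-instance copy, extended afterwards
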
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.362681] env[62510]: DEBUG oslo_vmware.api [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': task-1768571, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.411821] env[62510]: DEBUG oslo_vmware.api [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768570, 'name': Rename_Task, 'duration_secs': 0.211004} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.414819] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1534.415609] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-34279ca7-cd3c-40f2-8f2b-e86bca7ba014 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.422662] env[62510]: DEBUG oslo_vmware.api [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1534.422662] env[62510]: value = "task-1768572" [ 1534.422662] env[62510]: _type = "Task" [ 1534.422662] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.435381] env[62510]: DEBUG oslo_vmware.api [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768572, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.584026] env[62510]: DEBUG nova.compute.manager [req-b2d2d297-60a7-4116-b180-bcf4fb0a464e req-5b0c7137-7fcc-48a2-b3d4-adcec137139e service nova] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Received event network-vif-deleted-62729ef9-4eb8-410c-94fc-a7cd602e845d {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1534.658570] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9c92ca9-b007-490d-a2a6-27aa509d67b0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.670967] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1422da10-b6fb-4979-85b4-11533cfc8c16 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.706595] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3400797-4015-4c35-b385-465e8db172d6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.715494] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3f27ec8-aee6-4c60-91df-cb77fad369ae {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.732915] env[62510]: DEBUG nova.compute.provider_tree [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1534.806805] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0c302042-7aa8-4b32-a656-6eb23dd1977c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Acquiring lock "c7d875ee-2b9c-48e4-9bf9-f7602e75ec62" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1534.807089] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0c302042-7aa8-4b32-a656-6eb23dd1977c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Lock "c7d875ee-2b9c-48e4-9bf9-f7602e75ec62" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1534.807320] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0c302042-7aa8-4b32-a656-6eb23dd1977c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Acquiring lock "c7d875ee-2b9c-48e4-9bf9-f7602e75ec62-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1534.807556] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0c302042-7aa8-4b32-a656-6eb23dd1977c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Lock 
"c7d875ee-2b9c-48e4-9bf9-f7602e75ec62-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1534.807660] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0c302042-7aa8-4b32-a656-6eb23dd1977c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Lock "c7d875ee-2b9c-48e4-9bf9-f7602e75ec62-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1534.810522] env[62510]: INFO nova.compute.manager [None req-0c302042-7aa8-4b32-a656-6eb23dd1977c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Terminating instance [ 1534.867289] env[62510]: DEBUG oslo_vmware.api [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': task-1768571, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.915427] env[62510]: DEBUG nova.compute.manager [req-8f04cde8-7b96-451b-93cd-a4d896c2138f req-76d9a968-1f7e-4988-85ed-fdaf296045ed service nova] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Received event network-vif-plugged-d6ee81d1-3abc-4d5e-a8ca-658407cbd553 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1534.915644] env[62510]: DEBUG oslo_concurrency.lockutils [req-8f04cde8-7b96-451b-93cd-a4d896c2138f req-76d9a968-1f7e-4988-85ed-fdaf296045ed service nova] Acquiring lock "83fa0d32-18ee-401d-af0b-a0adb538e5f4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1534.915852] env[62510]: DEBUG oslo_concurrency.lockutils [req-8f04cde8-7b96-451b-93cd-a4d896c2138f req-76d9a968-1f7e-4988-85ed-fdaf296045ed service nova] Lock "83fa0d32-18ee-401d-af0b-a0adb538e5f4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1534.916046] env[62510]: DEBUG oslo_concurrency.lockutils [req-8f04cde8-7b96-451b-93cd-a4d896c2138f req-76d9a968-1f7e-4988-85ed-fdaf296045ed service nova] Lock "83fa0d32-18ee-401d-af0b-a0adb538e5f4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1534.917908] env[62510]: DEBUG nova.compute.manager [req-8f04cde8-7b96-451b-93cd-a4d896c2138f req-76d9a968-1f7e-4988-85ed-fdaf296045ed service nova] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] No waiting events found dispatching network-vif-plugged-d6ee81d1-3abc-4d5e-a8ca-658407cbd553 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1534.917908] env[62510]: WARNING nova.compute.manager [req-8f04cde8-7b96-451b-93cd-a4d896c2138f req-76d9a968-1f7e-4988-85ed-fdaf296045ed service nova] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Received 
unexpected event network-vif-plugged-d6ee81d1-3abc-4d5e-a8ca-658407cbd553 for instance with vm_state building and task_state spawning. [ 1534.917908] env[62510]: DEBUG nova.compute.manager [req-8f04cde8-7b96-451b-93cd-a4d896c2138f req-76d9a968-1f7e-4988-85ed-fdaf296045ed service nova] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Received event network-changed-d6ee81d1-3abc-4d5e-a8ca-658407cbd553 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1534.917908] env[62510]: DEBUG nova.compute.manager [req-8f04cde8-7b96-451b-93cd-a4d896c2138f req-76d9a968-1f7e-4988-85ed-fdaf296045ed service nova] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Refreshing instance network info cache due to event network-changed-d6ee81d1-3abc-4d5e-a8ca-658407cbd553. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1534.917908] env[62510]: DEBUG oslo_concurrency.lockutils [req-8f04cde8-7b96-451b-93cd-a4d896c2138f req-76d9a968-1f7e-4988-85ed-fdaf296045ed service nova] Acquiring lock "refresh_cache-83fa0d32-18ee-401d-af0b-a0adb538e5f4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1534.920293] env[62510]: DEBUG nova.network.neutron [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1534.936165] env[62510]: DEBUG oslo_vmware.api [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768572, 'name': PowerOnVM_Task} progress is 71%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.085505] env[62510]: DEBUG nova.compute.manager [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Start spawning the instance on the hypervisor. 
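
The WARNING above is the usual ordering race around Neutron's external events: the network-vif-plugged event for port d6ee81d1 arrives while the instance is still building/spawning, before anything has registered a waiter for it, so the dispatcher finds no waiting event and logs it as unexpected; the follow-up network-changed event then simply triggers a cache refresh. A minimal sketch of the pop-a-waiter-or-warn pattern (illustrative class, not nova's InstanceEvents):

import threading

class InstanceEventWaiters:
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}   # (instance_uuid, event_name) -> threading.Event

    def prepare(self, instance_uuid, event_name):
        waiter = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = waiter
        return waiter

    def dispatch(self, instance_uuid, event_name):
        with self._lock:
            waiter = self._waiters.pop((instance_uuid, event_name), None)
        if waiter is None:
            # The situation logged above: the event arrived while the instance
            # was still building/spawning and nothing was waiting for it yet.
            print(f"WARNING: received unexpected event {event_name} "
                  f"for instance {instance_uuid}")
        else:
            waiter.set()

events = InstanceEventWaiters()
events.dispatch("83fa0d32-18ee-401d-af0b-a0adb538e5f4",
                "network-vif-plugged-d6ee81d1-3abc-4d5e-a8ca-658407cbd553")
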
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1535.109282] env[62510]: DEBUG nova.virt.hardware [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1535.109694] env[62510]: DEBUG nova.virt.hardware [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1535.109907] env[62510]: DEBUG nova.virt.hardware [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1535.110224] env[62510]: DEBUG nova.virt.hardware [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1535.110506] env[62510]: DEBUG nova.virt.hardware [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1535.110734] env[62510]: DEBUG nova.virt.hardware [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1535.111068] env[62510]: DEBUG nova.virt.hardware [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1535.111376] env[62510]: DEBUG nova.virt.hardware [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1535.111651] env[62510]: DEBUG 
nova.virt.hardware [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1535.111921] env[62510]: DEBUG nova.virt.hardware [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1535.112194] env[62510]: DEBUG nova.virt.hardware [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1535.113857] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ede27e0a-5104-4ba0-b79f-b9381c409d41 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.124884] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55612990-b68b-425d-abe5-9f021b1c3501 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.238410] env[62510]: DEBUG nova.scheduler.client.report [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1535.315481] env[62510]: DEBUG nova.compute.manager [None req-0c302042-7aa8-4b32-a656-6eb23dd1977c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Start destroying the instance on the hypervisor. 
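
The provider inventory reported above is what placement uses to bound scheduling on this node: usable capacity per resource class is roughly (total - reserved) * allocation_ratio, with max_unit capping what a single instance may request. With the logged values:

# Usable capacity implied by the inventory logged above
# (usable = (total - reserved) * allocation_ratio).
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0, "max_unit": 16},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0, "max_unit": 165},
}

for rc, inv in inventory.items():
    usable = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: usable {usable:g}, per-instance cap {inv['max_unit']}")
# VCPU: usable 192, MEMORY_MB: usable 196078, DISK_GB: usable 400
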
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1535.316525] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0c302042-7aa8-4b32-a656-6eb23dd1977c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1535.316863] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cf295ac-f2b7-4481-a082-cae408937f99 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.321659] env[62510]: DEBUG nova.network.neutron [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Updating instance_info_cache with network_info: [{"id": "d6ee81d1-3abc-4d5e-a8ca-658407cbd553", "address": "fa:16:3e:45:ab:75", "network": {"id": "3958d418-1b64-4598-975c-02b13c976ce5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1692593298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3109fa7889c64dfda2117d4cd58aa528", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ba56987-9dc3-4c76-a4e2-942b05355bdb", "external-id": "nsx-vlan-transportzone-698", "segmentation_id": 698, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6ee81d1-3a", "ovs_interfaceid": "d6ee81d1-3abc-4d5e-a8ca-658407cbd553", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1535.329438] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c302042-7aa8-4b32-a656-6eb23dd1977c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1535.329715] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-547db2f9-4c96-4305-94f6-b9859fb38b68 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.339738] env[62510]: DEBUG oslo_vmware.api [None req-0c302042-7aa8-4b32-a656-6eb23dd1977c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Waiting for the task: (returnval){ [ 1535.339738] env[62510]: value = "task-1768573" [ 1535.339738] env[62510]: _type = "Task" [ 1535.339738] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.353151] env[62510]: DEBUG oslo_vmware.api [None req-0c302042-7aa8-4b32-a656-6eb23dd1977c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768573, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.364156] env[62510]: DEBUG oslo_vmware.api [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': task-1768571, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.535636} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.364433] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 3533a113-6f46-4b18-872d-9bc1b0481969/3533a113-6f46-4b18-872d-9bc1b0481969.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1535.364655] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1535.364946] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9eaeccfa-c12a-4e36-b7b6-95749182db9d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.373523] env[62510]: DEBUG oslo_vmware.api [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Waiting for the task: (returnval){ [ 1535.373523] env[62510]: value = "task-1768574" [ 1535.373523] env[62510]: _type = "Task" [ 1535.373523] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.382431] env[62510]: DEBUG oslo_vmware.api [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': task-1768574, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.435063] env[62510]: DEBUG oslo_vmware.api [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768572, 'name': PowerOnVM_Task, 'duration_secs': 0.958213} completed successfully. 
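
Every vCenter task in this log (CreateVM_Task, ReconfigVM_Task, CopyVirtualDisk_Task, Rename_Task, PowerOnVM_Task, ...) is driven by the same wait_for_task/_poll_task loop: read the task info, log progress while it is running, return on success, raise on error. A stand-in sketch of that loop; read_task_info is a caller-supplied callable here, whereas the real code polls the vCenter TaskInfo through oslo.vmware's session:

import time

def wait_for_task(task_id, read_task_info, poll_interval=0.5):
    # read_task_info: callable returning (state, progress) for the task.
    while True:
        state, progress = read_task_info()
        if state == "running":
            print(f"Task {task_id} progress is {progress}%.")
            time.sleep(poll_interval)
        elif state == "success":
            print(f"Task {task_id} completed successfully.")
            return
        else:
            raise RuntimeError(f"Task {task_id} failed")

# A scripted task that reports 0%, then 71%, then success (compare the
# PowerOnVM_Task entries for task-1768572 above):
updates = iter([("running", 0), ("running", 71), ("success", 100)])
wait_for_task("task-1768572", lambda: next(updates), poll_interval=0)
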
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.435365] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1535.435575] env[62510]: INFO nova.compute.manager [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Took 8.64 seconds to spawn the instance on the hypervisor. [ 1535.435762] env[62510]: DEBUG nova.compute.manager [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1535.436597] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9560ed71-9585-4053-b525-6926737e59b5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.742497] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.682s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1535.743071] env[62510]: DEBUG nova.compute.manager [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1535.750708] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4cec0b5b-42e9-4ea6-ab8c-767c3b9a2494 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.844s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1535.750952] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4cec0b5b-42e9-4ea6-ab8c-767c3b9a2494 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1535.754567] env[62510]: DEBUG oslo_concurrency.lockutils [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.106s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1535.758885] env[62510]: INFO nova.compute.claims [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1535.806224] env[62510]: INFO nova.scheduler.client.report [None req-4cec0b5b-42e9-4ea6-ab8c-767c3b9a2494 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Deleted allocations for instance 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be [ 1535.827412] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Releasing lock "refresh_cache-83fa0d32-18ee-401d-af0b-a0adb538e5f4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1535.827737] env[62510]: DEBUG nova.compute.manager [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Instance network_info: |[{"id": "d6ee81d1-3abc-4d5e-a8ca-658407cbd553", "address": "fa:16:3e:45:ab:75", "network": {"id": "3958d418-1b64-4598-975c-02b13c976ce5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1692593298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3109fa7889c64dfda2117d4cd58aa528", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ba56987-9dc3-4c76-a4e2-942b05355bdb", "external-id": "nsx-vlan-transportzone-698", "segmentation_id": 698, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tapd6ee81d1-3a", "ovs_interfaceid": "d6ee81d1-3abc-4d5e-a8ca-658407cbd553", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1535.828044] env[62510]: DEBUG oslo_concurrency.lockutils [req-8f04cde8-7b96-451b-93cd-a4d896c2138f req-76d9a968-1f7e-4988-85ed-fdaf296045ed service nova] Acquired lock "refresh_cache-83fa0d32-18ee-401d-af0b-a0adb538e5f4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1535.828239] env[62510]: DEBUG nova.network.neutron [req-8f04cde8-7b96-451b-93cd-a4d896c2138f req-76d9a968-1f7e-4988-85ed-fdaf296045ed service nova] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Refreshing network info cache for port d6ee81d1-3abc-4d5e-a8ca-658407cbd553 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1535.829381] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:ab:75', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6ba56987-9dc3-4c76-a4e2-942b05355bdb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd6ee81d1-3abc-4d5e-a8ca-658407cbd553', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1535.850846] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Creating folder: Project (3109fa7889c64dfda2117d4cd58aa528). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1535.850846] env[62510]: DEBUG nova.network.neutron [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Successfully updated port: 9d4d0ed7-cab1-4f7d-9eda-faa60a248129 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1535.850846] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d081cbf6-6a31-47dc-8fab-455d4e09b7af {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.861502] env[62510]: DEBUG oslo_vmware.api [None req-0c302042-7aa8-4b32-a656-6eb23dd1977c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768573, 'name': PowerOffVM_Task, 'duration_secs': 0.382639} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.861994] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-0c302042-7aa8-4b32-a656-6eb23dd1977c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1535.862389] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0c302042-7aa8-4b32-a656-6eb23dd1977c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1535.863021] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e1a0609a-e70f-4eed-a9e1-228cb77756b9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.867373] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Created folder: Project (3109fa7889c64dfda2117d4cd58aa528) in parent group-v367197. [ 1535.867709] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Creating folder: Instances. Parent ref: group-v367303. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1535.868654] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-07f6178a-d225-4d2c-8d1e-ae4cbf199fbd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.883947] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Created folder: Instances in parent group-v367303. [ 1535.883947] env[62510]: DEBUG oslo.service.loopingcall [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1535.883947] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1535.886733] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4cd291c6-7523-4f85-9974-d0c06c2ee199 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.901679] env[62510]: DEBUG oslo_vmware.api [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': task-1768574, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073269} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.902422] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1535.903850] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85f5fc14-5b81-4625-9dfb-38558c6fff87 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.908419] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1535.908419] env[62510]: value = "task-1768578" [ 1535.908419] env[62510]: _type = "Task" [ 1535.908419] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.926537] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] 3533a113-6f46-4b18-872d-9bc1b0481969/3533a113-6f46-4b18-872d-9bc1b0481969.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1535.932704] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8227f9d8-bd31-4de7-8557-d81c047f0a00 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.955782] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768578, 'name': CreateVM_Task} progress is 15%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.959028] env[62510]: DEBUG oslo_vmware.api [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Waiting for the task: (returnval){ [ 1535.959028] env[62510]: value = "task-1768579" [ 1535.959028] env[62510]: _type = "Task" [ 1535.959028] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.962818] env[62510]: INFO nova.compute.manager [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Took 47.71 seconds to build instance. 
[ 1535.972167] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0c302042-7aa8-4b32-a656-6eb23dd1977c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1535.972295] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0c302042-7aa8-4b32-a656-6eb23dd1977c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1535.972457] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c302042-7aa8-4b32-a656-6eb23dd1977c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Deleting the datastore file [datastore1] c7d875ee-2b9c-48e4-9bf9-f7602e75ec62 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1535.973327] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bfb33217-5e6f-4dbc-9e81-3f9d12bc3dc7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.978492] env[62510]: DEBUG oslo_vmware.api [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': task-1768579, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.984242] env[62510]: DEBUG oslo_vmware.api [None req-0c302042-7aa8-4b32-a656-6eb23dd1977c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Waiting for the task: (returnval){ [ 1535.984242] env[62510]: value = "task-1768580" [ 1535.984242] env[62510]: _type = "Task" [ 1535.984242] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.994676] env[62510]: DEBUG oslo_vmware.api [None req-0c302042-7aa8-4b32-a656-6eb23dd1977c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768580, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.253129] env[62510]: DEBUG nova.compute.utils [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1536.253777] env[62510]: DEBUG nova.compute.manager [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1536.253971] env[62510]: DEBUG nova.network.neutron [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1536.303357] env[62510]: DEBUG nova.policy [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'da40c4af8488460d9cc18a0c53c22751', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '83d477e726c84d6ebf945ba7f14c8240', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1536.316314] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4cec0b5b-42e9-4ea6-ab8c-767c3b9a2494 tempest-DeleteServersAdminTestJSON-1062637524 tempest-DeleteServersAdminTestJSON-1062637524-project-member] Lock "8e3cefa1-fab9-469e-8a32-31b4a8ecf4be" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.308s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1536.356071] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Acquiring lock "refresh_cache-2c5c38c1-511f-4aae-969a-eb6de128fae7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1536.356071] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Acquired lock "refresh_cache-2c5c38c1-511f-4aae-969a-eb6de128fae7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1536.356071] env[62510]: DEBUG nova.network.neutron [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1536.432251] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768578, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.468569] env[62510]: DEBUG oslo_concurrency.lockutils [None req-05f0c8e2-3fb9-43ca-86ad-09ff7f0b0ee5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "58e71d67-aed2-4329-ab60-4dfacff1d0a2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 98.208s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1536.478936] env[62510]: DEBUG oslo_vmware.api [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': task-1768579, 'name': ReconfigVM_Task, 'duration_secs': 0.515724} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.478936] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Reconfigured VM instance instance-00000026 to attach disk [datastore1] 3533a113-6f46-4b18-872d-9bc1b0481969/3533a113-6f46-4b18-872d-9bc1b0481969.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1536.478936] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8573d909-03a5-43fd-891e-bca20b06b97a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.486065] env[62510]: DEBUG oslo_vmware.api [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Waiting for the task: (returnval){ [ 1536.486065] env[62510]: value = "task-1768581" [ 1536.486065] env[62510]: _type = "Task" [ 1536.486065] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.498810] env[62510]: DEBUG oslo_vmware.api [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': task-1768581, 'name': Rename_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.503608] env[62510]: DEBUG oslo_vmware.api [None req-0c302042-7aa8-4b32-a656-6eb23dd1977c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Task: {'id': task-1768580, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.331319} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.503830] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-0c302042-7aa8-4b32-a656-6eb23dd1977c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1536.504312] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0c302042-7aa8-4b32-a656-6eb23dd1977c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1536.505439] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0c302042-7aa8-4b32-a656-6eb23dd1977c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1536.505439] env[62510]: INFO nova.compute.manager [None req-0c302042-7aa8-4b32-a656-6eb23dd1977c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1536.505513] env[62510]: DEBUG oslo.service.loopingcall [None req-0c302042-7aa8-4b32-a656-6eb23dd1977c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1536.505706] env[62510]: DEBUG nova.compute.manager [-] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1536.506089] env[62510]: DEBUG nova.network.neutron [-] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1536.755902] env[62510]: DEBUG nova.network.neutron [req-8f04cde8-7b96-451b-93cd-a4d896c2138f req-76d9a968-1f7e-4988-85ed-fdaf296045ed service nova] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Updated VIF entry in instance network info cache for port d6ee81d1-3abc-4d5e-a8ca-658407cbd553. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1536.756274] env[62510]: DEBUG nova.network.neutron [req-8f04cde8-7b96-451b-93cd-a4d896c2138f req-76d9a968-1f7e-4988-85ed-fdaf296045ed service nova] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Updating instance_info_cache with network_info: [{"id": "d6ee81d1-3abc-4d5e-a8ca-658407cbd553", "address": "fa:16:3e:45:ab:75", "network": {"id": "3958d418-1b64-4598-975c-02b13c976ce5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1692593298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3109fa7889c64dfda2117d4cd58aa528", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ba56987-9dc3-4c76-a4e2-942b05355bdb", "external-id": "nsx-vlan-transportzone-698", "segmentation_id": 698, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6ee81d1-3a", "ovs_interfaceid": "d6ee81d1-3abc-4d5e-a8ca-658407cbd553", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1536.758959] env[62510]: DEBUG nova.compute.manager [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1536.778233] env[62510]: DEBUG nova.network.neutron [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Successfully created port: 47d66511-aecb-424e-91f3-0fe84c41ab26 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1536.928973] env[62510]: DEBUG nova.network.neutron [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1536.937442] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768578, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.969086] env[62510]: DEBUG nova.compute.manager [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1536.994316] env[62510]: DEBUG nova.compute.manager [req-70ebbfc1-35de-424d-84f3-984280cb56c0 req-6f24e196-c19d-4cca-a85c-ca2d8ac77ac6 service nova] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Received event network-vif-plugged-9d4d0ed7-cab1-4f7d-9eda-faa60a248129 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1536.995277] env[62510]: DEBUG oslo_concurrency.lockutils [req-70ebbfc1-35de-424d-84f3-984280cb56c0 req-6f24e196-c19d-4cca-a85c-ca2d8ac77ac6 service nova] Acquiring lock "2c5c38c1-511f-4aae-969a-eb6de128fae7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1536.995386] env[62510]: DEBUG oslo_concurrency.lockutils [req-70ebbfc1-35de-424d-84f3-984280cb56c0 req-6f24e196-c19d-4cca-a85c-ca2d8ac77ac6 service nova] Lock "2c5c38c1-511f-4aae-969a-eb6de128fae7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1536.995724] env[62510]: DEBUG oslo_concurrency.lockutils [req-70ebbfc1-35de-424d-84f3-984280cb56c0 req-6f24e196-c19d-4cca-a85c-ca2d8ac77ac6 service nova] Lock "2c5c38c1-511f-4aae-969a-eb6de128fae7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1536.995973] env[62510]: DEBUG nova.compute.manager [req-70ebbfc1-35de-424d-84f3-984280cb56c0 req-6f24e196-c19d-4cca-a85c-ca2d8ac77ac6 service nova] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] No waiting events found dispatching network-vif-plugged-9d4d0ed7-cab1-4f7d-9eda-faa60a248129 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1536.996215] env[62510]: WARNING nova.compute.manager [req-70ebbfc1-35de-424d-84f3-984280cb56c0 req-6f24e196-c19d-4cca-a85c-ca2d8ac77ac6 service nova] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Received unexpected event network-vif-plugged-9d4d0ed7-cab1-4f7d-9eda-faa60a248129 for instance with vm_state building and task_state spawning. [ 1536.996979] env[62510]: DEBUG nova.compute.manager [req-70ebbfc1-35de-424d-84f3-984280cb56c0 req-6f24e196-c19d-4cca-a85c-ca2d8ac77ac6 service nova] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Received event network-changed-9d4d0ed7-cab1-4f7d-9eda-faa60a248129 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1536.996979] env[62510]: DEBUG nova.compute.manager [req-70ebbfc1-35de-424d-84f3-984280cb56c0 req-6f24e196-c19d-4cca-a85c-ca2d8ac77ac6 service nova] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Refreshing instance network info cache due to event network-changed-9d4d0ed7-cab1-4f7d-9eda-faa60a248129. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1536.996979] env[62510]: DEBUG oslo_concurrency.lockutils [req-70ebbfc1-35de-424d-84f3-984280cb56c0 req-6f24e196-c19d-4cca-a85c-ca2d8ac77ac6 service nova] Acquiring lock "refresh_cache-2c5c38c1-511f-4aae-969a-eb6de128fae7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1537.012061] env[62510]: DEBUG oslo_vmware.api [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': task-1768581, 'name': Rename_Task, 'duration_secs': 0.305954} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.012647] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1537.012903] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c6ac1699-462c-4aae-b5c7-78e6fdbc3587 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.022417] env[62510]: DEBUG oslo_vmware.api [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Waiting for the task: (returnval){ [ 1537.022417] env[62510]: value = "task-1768582" [ 1537.022417] env[62510]: _type = "Task" [ 1537.022417] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.036853] env[62510]: DEBUG oslo_vmware.api [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': task-1768582, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.226019] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3b432a17-acf3-4f5b-a225-18a1f7e74242 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquiring lock "58e71d67-aed2-4329-ab60-4dfacff1d0a2" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1537.226019] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3b432a17-acf3-4f5b-a225-18a1f7e74242 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "58e71d67-aed2-4329-ab60-4dfacff1d0a2" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1537.226019] env[62510]: DEBUG nova.compute.manager [None req-3b432a17-acf3-4f5b-a225-18a1f7e74242 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1537.228344] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42b95090-3e42-406a-8622-3f6f16cf4e44 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.235947] env[62510]: DEBUG nova.network.neutron [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Updating instance_info_cache with network_info: [{"id": "9d4d0ed7-cab1-4f7d-9eda-faa60a248129", "address": "fa:16:3e:fd:0f:b3", "network": {"id": "4d6e89ba-db6e-4017-86b9-eabc5266d9be", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1854895739-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7658f50bda794df68c1e82f4978d787b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d4d0ed7-ca", "ovs_interfaceid": "9d4d0ed7-cab1-4f7d-9eda-faa60a248129", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1537.247250] env[62510]: DEBUG nova.compute.manager [None req-3b432a17-acf3-4f5b-a225-18a1f7e74242 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62510) do_stop_instance 
/opt/stack/nova/nova/compute/manager.py:3404}} [ 1537.247985] env[62510]: DEBUG nova.objects.instance [None req-3b432a17-acf3-4f5b-a225-18a1f7e74242 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lazy-loading 'flavor' on Instance uuid 58e71d67-aed2-4329-ab60-4dfacff1d0a2 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1537.262656] env[62510]: DEBUG oslo_concurrency.lockutils [req-8f04cde8-7b96-451b-93cd-a4d896c2138f req-76d9a968-1f7e-4988-85ed-fdaf296045ed service nova] Releasing lock "refresh_cache-83fa0d32-18ee-401d-af0b-a0adb538e5f4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1537.328019] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48246fad-f146-4d06-9d75-b335c1fe607b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.339143] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efca53c1-e84d-44d7-9d5d-2e1afd528ae2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.381163] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c30d472e-cfa1-411b-a875-4210a5f5b4f9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.391311] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-512acf79-d0c7-4f05-a8de-ba8bc5eeff17 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.409271] env[62510]: DEBUG nova.compute.provider_tree [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1537.438097] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768578, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.475539] env[62510]: DEBUG nova.network.neutron [-] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1537.507535] env[62510]: DEBUG oslo_concurrency.lockutils [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1537.512905] env[62510]: DEBUG nova.compute.manager [req-c4fdad2c-6f58-499d-b345-913472bf3811 req-f6bc1c38-51d5-4e1f-bdd8-9fa869d8958d service nova] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Received event network-vif-deleted-7a0f3d5f-4630-470a-9084-d1e05b4f306e {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1537.543168] env[62510]: DEBUG oslo_vmware.api [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': task-1768582, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.739545] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Releasing lock "refresh_cache-2c5c38c1-511f-4aae-969a-eb6de128fae7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1537.739907] env[62510]: DEBUG nova.compute.manager [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Instance network_info: |[{"id": "9d4d0ed7-cab1-4f7d-9eda-faa60a248129", "address": "fa:16:3e:fd:0f:b3", "network": {"id": "4d6e89ba-db6e-4017-86b9-eabc5266d9be", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1854895739-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7658f50bda794df68c1e82f4978d787b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d4d0ed7-ca", "ovs_interfaceid": "9d4d0ed7-cab1-4f7d-9eda-faa60a248129", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1537.740232] env[62510]: DEBUG oslo_concurrency.lockutils [req-70ebbfc1-35de-424d-84f3-984280cb56c0 
req-6f24e196-c19d-4cca-a85c-ca2d8ac77ac6 service nova] Acquired lock "refresh_cache-2c5c38c1-511f-4aae-969a-eb6de128fae7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1537.740413] env[62510]: DEBUG nova.network.neutron [req-70ebbfc1-35de-424d-84f3-984280cb56c0 req-6f24e196-c19d-4cca-a85c-ca2d8ac77ac6 service nova] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Refreshing network info cache for port 9d4d0ed7-cab1-4f7d-9eda-faa60a248129 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1537.742277] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fd:0f:b3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db068f71-08cc-42d4-8ab6-17134c1585e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9d4d0ed7-cab1-4f7d-9eda-faa60a248129', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1537.750057] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Creating folder: Project (7658f50bda794df68c1e82f4978d787b). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1537.753012] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3d2e5b7c-d345-40bb-9156-46a91ff35384 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.768889] env[62510]: DEBUG nova.compute.manager [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1537.772479] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Created folder: Project (7658f50bda794df68c1e82f4978d787b) in parent group-v367197. [ 1537.773566] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Creating folder: Instances. Parent ref: group-v367306. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1537.773566] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8e9601cc-ccbf-4134-8b29-a9a727963fdd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.785186] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Created folder: Instances in parent group-v367306. 
[ 1537.785507] env[62510]: DEBUG oslo.service.loopingcall [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1537.788020] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1537.789053] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8a567932-649c-45dc-b566-42b65b2900a7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.807908] env[62510]: DEBUG nova.virt.hardware [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1537.808238] env[62510]: DEBUG nova.virt.hardware [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1537.808489] env[62510]: DEBUG nova.virt.hardware [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1537.808708] env[62510]: DEBUG nova.virt.hardware [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1537.808947] env[62510]: DEBUG nova.virt.hardware [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1537.809152] env[62510]: DEBUG nova.virt.hardware [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 
1537.809477] env[62510]: DEBUG nova.virt.hardware [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1537.809718] env[62510]: DEBUG nova.virt.hardware [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1537.809955] env[62510]: DEBUG nova.virt.hardware [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1537.810195] env[62510]: DEBUG nova.virt.hardware [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1537.810437] env[62510]: DEBUG nova.virt.hardware [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1537.811920] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc2f1422-73ee-4d30-b6c5-142811b168c9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.823742] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1104379f-dca3-4f85-9394-44fbfff60de1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.828542] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1537.828542] env[62510]: value = "task-1768585" [ 1537.828542] env[62510]: _type = "Task" [ 1537.828542] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.846686] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768585, 'name': CreateVM_Task} progress is 6%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.913768] env[62510]: DEBUG nova.scheduler.client.report [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1537.936712] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768578, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.980976] env[62510]: INFO nova.compute.manager [-] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Took 1.47 seconds to deallocate network for instance. [ 1538.036893] env[62510]: DEBUG oslo_vmware.api [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': task-1768582, 'name': PowerOnVM_Task, 'duration_secs': 0.538752} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.037067] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1538.037251] env[62510]: INFO nova.compute.manager [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Took 8.49 seconds to spawn the instance on the hypervisor. 
[ 1538.037456] env[62510]: DEBUG nova.compute.manager [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1538.038340] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53987016-1a95-4140-92ce-db6b9884f7ea {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.257951] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b432a17-acf3-4f5b-a225-18a1f7e74242 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1538.258320] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-efc02ad5-7aa9-4b9b-a8e1-ca8d811de29d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.267511] env[62510]: DEBUG oslo_vmware.api [None req-3b432a17-acf3-4f5b-a225-18a1f7e74242 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1538.267511] env[62510]: value = "task-1768586" [ 1538.267511] env[62510]: _type = "Task" [ 1538.267511] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1538.281169] env[62510]: DEBUG oslo_vmware.api [None req-3b432a17-acf3-4f5b-a225-18a1f7e74242 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768586, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.347128] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768585, 'name': CreateVM_Task, 'duration_secs': 0.440656} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.349423] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1538.349678] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1538.349987] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1538.350565] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1538.350918] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e81448a-e1d5-4f49-a554-2180902898ae {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.358749] env[62510]: DEBUG oslo_vmware.api [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Waiting for the task: (returnval){ [ 1538.358749] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]528d627b-c951-7c4b-feaf-d155e9cda6f2" [ 1538.358749] env[62510]: _type = "Task" [ 1538.358749] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1538.370883] env[62510]: DEBUG oslo_vmware.api [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]528d627b-c951-7c4b-feaf-d155e9cda6f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.420214] env[62510]: DEBUG oslo_concurrency.lockutils [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.667s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1538.420901] env[62510]: DEBUG nova.compute.manager [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1538.424038] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.879s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1538.426564] env[62510]: INFO nova.compute.claims [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1538.447417] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768578, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.489441] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0c302042-7aa8-4b32-a656-6eb23dd1977c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1538.566366] env[62510]: DEBUG nova.network.neutron [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Successfully updated port: 47d66511-aecb-424e-91f3-0fe84c41ab26 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1538.574965] env[62510]: INFO nova.compute.manager [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Took 48.54 seconds to build instance. [ 1538.718377] env[62510]: DEBUG oslo_concurrency.lockutils [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Acquiring lock "fa43a538-1aae-4642-8370-70f2a49ca92c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1538.718866] env[62510]: DEBUG oslo_concurrency.lockutils [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Lock "fa43a538-1aae-4642-8370-70f2a49ca92c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1538.759894] env[62510]: DEBUG nova.network.neutron [req-70ebbfc1-35de-424d-84f3-984280cb56c0 req-6f24e196-c19d-4cca-a85c-ca2d8ac77ac6 service nova] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Updated VIF entry in instance network info cache for port 9d4d0ed7-cab1-4f7d-9eda-faa60a248129. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1538.759894] env[62510]: DEBUG nova.network.neutron [req-70ebbfc1-35de-424d-84f3-984280cb56c0 req-6f24e196-c19d-4cca-a85c-ca2d8ac77ac6 service nova] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Updating instance_info_cache with network_info: [{"id": "9d4d0ed7-cab1-4f7d-9eda-faa60a248129", "address": "fa:16:3e:fd:0f:b3", "network": {"id": "4d6e89ba-db6e-4017-86b9-eabc5266d9be", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1854895739-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7658f50bda794df68c1e82f4978d787b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d4d0ed7-ca", "ovs_interfaceid": "9d4d0ed7-cab1-4f7d-9eda-faa60a248129", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1538.785043] env[62510]: DEBUG oslo_vmware.api [None req-3b432a17-acf3-4f5b-a225-18a1f7e74242 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768586, 'name': PowerOffVM_Task, 'duration_secs': 0.244687} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.785624] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b432a17-acf3-4f5b-a225-18a1f7e74242 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1538.786251] env[62510]: DEBUG nova.compute.manager [None req-3b432a17-acf3-4f5b-a225-18a1f7e74242 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1538.787156] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fca373bb-051a-46fb-865f-8a3f2229a04a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.870855] env[62510]: DEBUG oslo_vmware.api [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]528d627b-c951-7c4b-feaf-d155e9cda6f2, 'name': SearchDatastore_Task, 'duration_secs': 0.012861} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.871294] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1538.871590] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1538.871921] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1538.872125] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1538.872350] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1538.872712] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d7fe3a36-34f7-4c86-933b-507225e2a4f8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.883847] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1538.884231] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1538.885197] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11cfbc98-6e4e-48d6-8e83-ad325429d63f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.892446] env[62510]: DEBUG oslo_vmware.api [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Waiting for the task: (returnval){ [ 1538.892446] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52bfb13d-5454-5de8-812f-68d54c94a251" [ 1538.892446] env[62510]: _type = "Task" [ 1538.892446] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1538.904935] env[62510]: DEBUG oslo_vmware.api [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52bfb13d-5454-5de8-812f-68d54c94a251, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.937129] env[62510]: DEBUG nova.compute.utils [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1538.952388] env[62510]: DEBUG nova.compute.manager [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1538.952829] env[62510]: DEBUG nova.network.neutron [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1538.964242] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768578, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.046084] env[62510]: DEBUG nova.policy [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1f92ecaf8f484e88b2562f9c253e98ca', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b7605d0022ff45a8a1fb357da78ecc3d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1539.070321] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Acquiring lock "refresh_cache-2c5d137d-4fd5-4035-a04f-bdb76e90edd7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1539.070321] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Acquired lock "refresh_cache-2c5d137d-4fd5-4035-a04f-bdb76e90edd7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1539.070321] env[62510]: DEBUG nova.network.neutron [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1539.077020] env[62510]: DEBUG oslo_concurrency.lockutils [None req-15893d1f-80fc-42fd-9bbf-ad62fe8e3f3b tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Lock "3533a113-6f46-4b18-872d-9bc1b0481969" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 99.247s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1539.265139] env[62510]: DEBUG oslo_concurrency.lockutils [req-70ebbfc1-35de-424d-84f3-984280cb56c0 req-6f24e196-c19d-4cca-a85c-ca2d8ac77ac6 service nova] Releasing lock "refresh_cache-2c5c38c1-511f-4aae-969a-eb6de128fae7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1539.300408] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3b432a17-acf3-4f5b-a225-18a1f7e74242 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "58e71d67-aed2-4329-ab60-4dfacff1d0a2" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.075s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1539.374730] env[62510]: DEBUG nova.compute.manager [req-e7432df1-de6c-41c3-8a18-33ec7dc05608 req-a0ce9808-ca29-49fd-b69b-42ce6265474c service nova] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Received event network-vif-plugged-47d66511-aecb-424e-91f3-0fe84c41ab26 {{(pid=62510) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11512}} [ 1539.374730] env[62510]: DEBUG oslo_concurrency.lockutils [req-e7432df1-de6c-41c3-8a18-33ec7dc05608 req-a0ce9808-ca29-49fd-b69b-42ce6265474c service nova] Acquiring lock "2c5d137d-4fd5-4035-a04f-bdb76e90edd7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1539.374730] env[62510]: DEBUG oslo_concurrency.lockutils [req-e7432df1-de6c-41c3-8a18-33ec7dc05608 req-a0ce9808-ca29-49fd-b69b-42ce6265474c service nova] Lock "2c5d137d-4fd5-4035-a04f-bdb76e90edd7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1539.374730] env[62510]: DEBUG oslo_concurrency.lockutils [req-e7432df1-de6c-41c3-8a18-33ec7dc05608 req-a0ce9808-ca29-49fd-b69b-42ce6265474c service nova] Lock "2c5d137d-4fd5-4035-a04f-bdb76e90edd7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1539.374730] env[62510]: DEBUG nova.compute.manager [req-e7432df1-de6c-41c3-8a18-33ec7dc05608 req-a0ce9808-ca29-49fd-b69b-42ce6265474c service nova] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] No waiting events found dispatching network-vif-plugged-47d66511-aecb-424e-91f3-0fe84c41ab26 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1539.376301] env[62510]: WARNING nova.compute.manager [req-e7432df1-de6c-41c3-8a18-33ec7dc05608 req-a0ce9808-ca29-49fd-b69b-42ce6265474c service nova] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Received unexpected event network-vif-plugged-47d66511-aecb-424e-91f3-0fe84c41ab26 for instance with vm_state building and task_state spawning. [ 1539.376301] env[62510]: DEBUG nova.compute.manager [req-e7432df1-de6c-41c3-8a18-33ec7dc05608 req-a0ce9808-ca29-49fd-b69b-42ce6265474c service nova] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Received event network-changed-47d66511-aecb-424e-91f3-0fe84c41ab26 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1539.376301] env[62510]: DEBUG nova.compute.manager [req-e7432df1-de6c-41c3-8a18-33ec7dc05608 req-a0ce9808-ca29-49fd-b69b-42ce6265474c service nova] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Refreshing instance network info cache due to event network-changed-47d66511-aecb-424e-91f3-0fe84c41ab26. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1539.376301] env[62510]: DEBUG oslo_concurrency.lockutils [req-e7432df1-de6c-41c3-8a18-33ec7dc05608 req-a0ce9808-ca29-49fd-b69b-42ce6265474c service nova] Acquiring lock "refresh_cache-2c5d137d-4fd5-4035-a04f-bdb76e90edd7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1539.406104] env[62510]: DEBUG oslo_vmware.api [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52bfb13d-5454-5de8-812f-68d54c94a251, 'name': SearchDatastore_Task, 'duration_secs': 0.019044} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1539.407190] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b785470d-a379-4821-bb3e-e144c34f8484 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.416138] env[62510]: DEBUG oslo_vmware.api [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Waiting for the task: (returnval){ [ 1539.416138] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5225c62a-044b-efce-48e3-6adf7f72b955" [ 1539.416138] env[62510]: _type = "Task" [ 1539.416138] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1539.431124] env[62510]: DEBUG oslo_vmware.api [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5225c62a-044b-efce-48e3-6adf7f72b955, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.451451] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768578, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.452419] env[62510]: DEBUG nova.compute.manager [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1539.583251] env[62510]: DEBUG nova.compute.manager [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1539.634178] env[62510]: DEBUG nova.network.neutron [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Instance cache missing network info. 
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1539.662316] env[62510]: DEBUG nova.network.neutron [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Successfully created port: 3ed074cb-cedf-490b-b36a-d695cbf28633 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1539.882987] env[62510]: DEBUG nova.network.neutron [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Updating instance_info_cache with network_info: [{"id": "47d66511-aecb-424e-91f3-0fe84c41ab26", "address": "fa:16:3e:99:b3:74", "network": {"id": "779c2a5d-b192-4fe8-ab39-90d855170ebe", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1268553376-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83d477e726c84d6ebf945ba7f14c8240", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47d66511-ae", "ovs_interfaceid": "47d66511-aecb-424e-91f3-0fe84c41ab26", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1539.929981] env[62510]: DEBUG oslo_vmware.api [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5225c62a-044b-efce-48e3-6adf7f72b955, 'name': SearchDatastore_Task, 'duration_secs': 0.022196} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1539.933366] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1539.933366] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 2c5c38c1-511f-4aae-969a-eb6de128fae7/2c5c38c1-511f-4aae-969a-eb6de128fae7.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1539.933687] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b63d5c4b-635e-4428-a548-0effe07a1c14 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.949319] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768578, 'name': CreateVM_Task, 'duration_secs': 3.794122} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1539.949748] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1539.950432] env[62510]: DEBUG oslo_vmware.api [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Waiting for the task: (returnval){ [ 1539.950432] env[62510]: value = "task-1768587" [ 1539.950432] env[62510]: _type = "Task" [ 1539.950432] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1539.954307] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1539.954475] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1539.954788] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1539.955479] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16667db3-a923-4c5b-9971-9425fba83b09 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.979751] env[62510]: DEBUG oslo_vmware.api [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Task: {'id': task-1768587, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.984895] env[62510]: DEBUG oslo_vmware.api [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1539.984895] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52eeb842-d79a-4dfc-c8c8-da5d913c7ea2" [ 1539.984895] env[62510]: _type = "Task" [ 1539.984895] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1539.992165] env[62510]: DEBUG oslo_vmware.api [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52eeb842-d79a-4dfc-c8c8-da5d913c7ea2, 'name': SearchDatastore_Task, 'duration_secs': 0.01837} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1539.994795] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1539.995106] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1539.995349] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1539.995494] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1539.995664] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1539.996174] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d1bde193-033b-4c98-baea-bf01741b6eff {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.010334] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1540.010334] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1540.013261] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-667a56c9-134b-46a4-95a4-749be4b2f6e3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.021960] env[62510]: DEBUG oslo_vmware.api [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1540.021960] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52ef2591-17e7-8146-cd70-60bebfb78de8" [ 1540.021960] env[62510]: _type = "Task" [ 1540.021960] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.030316] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ebfcfb3-4cec-4bf1-ae35-5dec22272ad5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.036131] env[62510]: DEBUG oslo_vmware.api [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52ef2591-17e7-8146-cd70-60bebfb78de8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.042418] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f069c2cc-a8fe-457a-a5f6-7ec476379391 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.080463] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7b1a360-5d8c-4a85-9f0a-43bdf72b9317 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.095957] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-913f75ab-4673-4664-bcfc-3d26cfb800aa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.104278] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1540.115017] env[62510]: DEBUG nova.compute.provider_tree [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1540.392878] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Releasing lock "refresh_cache-2c5d137d-4fd5-4035-a04f-bdb76e90edd7" {{(pid=62510) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1540.396136] env[62510]: DEBUG nova.compute.manager [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Instance network_info: |[{"id": "47d66511-aecb-424e-91f3-0fe84c41ab26", "address": "fa:16:3e:99:b3:74", "network": {"id": "779c2a5d-b192-4fe8-ab39-90d855170ebe", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1268553376-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83d477e726c84d6ebf945ba7f14c8240", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47d66511-ae", "ovs_interfaceid": "47d66511-aecb-424e-91f3-0fe84c41ab26", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1540.396136] env[62510]: DEBUG oslo_concurrency.lockutils [req-e7432df1-de6c-41c3-8a18-33ec7dc05608 req-a0ce9808-ca29-49fd-b69b-42ce6265474c service nova] Acquired lock "refresh_cache-2c5d137d-4fd5-4035-a04f-bdb76e90edd7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1540.396136] env[62510]: DEBUG nova.network.neutron [req-e7432df1-de6c-41c3-8a18-33ec7dc05608 req-a0ce9808-ca29-49fd-b69b-42ce6265474c service nova] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Refreshing network info cache for port 47d66511-aecb-424e-91f3-0fe84c41ab26 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1540.396136] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:99:b3:74', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '77aa121f-8fb6-42f3-aaea-43addfe449b2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '47d66511-aecb-424e-91f3-0fe84c41ab26', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1540.407025] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Creating folder: Project (83d477e726c84d6ebf945ba7f14c8240). Parent ref: group-v367197. 
{{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1540.408605] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-899d55ba-740e-4e26-8b2c-21ccfd75ede9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.426617] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Created folder: Project (83d477e726c84d6ebf945ba7f14c8240) in parent group-v367197. [ 1540.426617] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Creating folder: Instances. Parent ref: group-v367309. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1540.426617] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-133472f4-17ce-46ea-9174-35e6d91d0986 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.441788] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Created folder: Instances in parent group-v367309. [ 1540.441788] env[62510]: DEBUG oslo.service.loopingcall [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1540.443979] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1540.443979] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2eac808a-630f-4946-b771-7196ca0b1453 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.467593] env[62510]: DEBUG nova.compute.manager [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1540.481128] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1540.481128] env[62510]: value = "task-1768590" [ 1540.481128] env[62510]: _type = "Task" [ 1540.481128] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.490023] env[62510]: DEBUG oslo_vmware.api [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Task: {'id': task-1768587, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.497624] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768590, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.547325] env[62510]: DEBUG oslo_vmware.api [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52ef2591-17e7-8146-cd70-60bebfb78de8, 'name': SearchDatastore_Task, 'duration_secs': 0.012956} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.550825] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7574adbd-c315-46e1-97c0-b17e98ef1a80 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.561985] env[62510]: DEBUG oslo_vmware.api [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1540.561985] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52db7296-286e-a0cb-1097-67ed2720197d" [ 1540.561985] env[62510]: _type = "Task" [ 1540.561985] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.575683] env[62510]: DEBUG oslo_vmware.api [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52db7296-286e-a0cb-1097-67ed2720197d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.602613] env[62510]: DEBUG nova.virt.hardware [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1540.602964] env[62510]: DEBUG nova.virt.hardware [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1540.603435] env[62510]: DEBUG nova.virt.hardware [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1540.603726] env[62510]: DEBUG nova.virt.hardware [None 
req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1540.603932] env[62510]: DEBUG nova.virt.hardware [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1540.604126] env[62510]: DEBUG nova.virt.hardware [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1540.604432] env[62510]: DEBUG nova.virt.hardware [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1540.604626] env[62510]: DEBUG nova.virt.hardware [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1540.604851] env[62510]: DEBUG nova.virt.hardware [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1540.605093] env[62510]: DEBUG nova.virt.hardware [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1540.605351] env[62510]: DEBUG nova.virt.hardware [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1540.606294] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59221696-238f-4812-8f42-9d3ec393a3bd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.617151] env[62510]: DEBUG nova.scheduler.client.report [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1540.624040] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0cb97dc-572e-4886-95f8-ad0326c2fb6a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.829587] env[62510]: DEBUG nova.network.neutron [req-e7432df1-de6c-41c3-8a18-33ec7dc05608 req-a0ce9808-ca29-49fd-b69b-42ce6265474c service nova] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Updated VIF entry in instance network info cache for port 47d66511-aecb-424e-91f3-0fe84c41ab26. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1540.830084] env[62510]: DEBUG nova.network.neutron [req-e7432df1-de6c-41c3-8a18-33ec7dc05608 req-a0ce9808-ca29-49fd-b69b-42ce6265474c service nova] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Updating instance_info_cache with network_info: [{"id": "47d66511-aecb-424e-91f3-0fe84c41ab26", "address": "fa:16:3e:99:b3:74", "network": {"id": "779c2a5d-b192-4fe8-ab39-90d855170ebe", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1268553376-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83d477e726c84d6ebf945ba7f14c8240", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47d66511-ae", "ovs_interfaceid": "47d66511-aecb-424e-91f3-0fe84c41ab26", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1540.979240] env[62510]: DEBUG oslo_vmware.api [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Task: {'id': task-1768587, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.713866} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.979507] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 2c5c38c1-511f-4aae-969a-eb6de128fae7/2c5c38c1-511f-4aae-969a-eb6de128fae7.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1540.979721] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1540.979991] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6f666c4f-e5b7-42da-b757-b83f4fa4bebc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.997158] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768590, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.999907] env[62510]: DEBUG oslo_vmware.api [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Waiting for the task: (returnval){ [ 1540.999907] env[62510]: value = "task-1768591" [ 1540.999907] env[62510]: _type = "Task" [ 1540.999907] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1541.012263] env[62510]: DEBUG oslo_vmware.api [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Task: {'id': task-1768591, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.077420] env[62510]: DEBUG oslo_vmware.api [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52db7296-286e-a0cb-1097-67ed2720197d, 'name': SearchDatastore_Task, 'duration_secs': 0.076194} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1541.079287] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1541.079287] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 83fa0d32-18ee-401d-af0b-a0adb538e5f4/83fa0d32-18ee-401d-af0b-a0adb538e5f4.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1541.079287] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-53cd5102-a063-432c-8304-229bd6a26eef {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.087761] env[62510]: DEBUG oslo_vmware.api [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1541.087761] env[62510]: value = "task-1768592" [ 1541.087761] env[62510]: _type = "Task" [ 1541.087761] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1541.101376] env[62510]: DEBUG oslo_vmware.api [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1768592, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.123830] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.700s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1541.124401] env[62510]: DEBUG nova.compute.manager [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1541.127763] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.414s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1541.129226] env[62510]: INFO nova.compute.claims [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1541.338348] env[62510]: DEBUG oslo_concurrency.lockutils [req-e7432df1-de6c-41c3-8a18-33ec7dc05608 req-a0ce9808-ca29-49fd-b69b-42ce6265474c service nova] Releasing lock "refresh_cache-2c5d137d-4fd5-4035-a04f-bdb76e90edd7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1541.500508] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768590, 'name': CreateVM_Task, 'duration_secs': 0.542775} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1541.501791] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1541.511223] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1541.511223] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1541.511223] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1541.511223] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4db6026-49a7-46ba-94a8-24eacbdeea32 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.524456] env[62510]: DEBUG oslo_vmware.api [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Task: {'id': task-1768591, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.115634} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1541.526233] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1541.526831] env[62510]: DEBUG oslo_vmware.api [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Waiting for the task: (returnval){ [ 1541.526831] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52535c52-cea2-0419-f3f9-4be071041a1f" [ 1541.526831] env[62510]: _type = "Task" [ 1541.526831] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1541.527788] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1780f3b1-1931-484a-bd83-63b981965e6e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.568700] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Reconfiguring VM instance instance-00000028 to attach disk [datastore1] 2c5c38c1-511f-4aae-969a-eb6de128fae7/2c5c38c1-511f-4aae-969a-eb6de128fae7.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1541.569304] env[62510]: DEBUG oslo_vmware.api [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52535c52-cea2-0419-f3f9-4be071041a1f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.569637] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-080d18be-26b5-46b4-8f7d-ddf0a61c7719 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.596964] env[62510]: DEBUG oslo_vmware.api [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Waiting for the task: (returnval){ [ 1541.596964] env[62510]: value = "task-1768593" [ 1541.596964] env[62510]: _type = "Task" [ 1541.596964] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1541.600826] env[62510]: DEBUG oslo_vmware.api [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1768592, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.611637] env[62510]: DEBUG oslo_vmware.api [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Task: {'id': task-1768593, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.640040] env[62510]: DEBUG nova.compute.utils [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1541.642720] env[62510]: DEBUG nova.compute.manager [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1541.645043] env[62510]: DEBUG nova.network.neutron [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1541.646118] env[62510]: DEBUG nova.compute.manager [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1541.647376] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-545119c0-9d77-41f5-a689-06ce8bb9c5bf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.718464] env[62510]: DEBUG nova.policy [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fdc55f42fb254892842e9f110b9eb79f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4696f6ba16a84f3fa4c0daaf08d40fc7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1541.798720] env[62510]: DEBUG nova.compute.manager [req-80ee81c0-aaa7-45c5-be0b-122dced45756 req-dcb4c3ba-b8a8-4298-9516-ac05fdfdbd95 service nova] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Received event network-changed-c2d4507d-2fd6-466d-9025-685dbebc79f3 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1541.799060] env[62510]: DEBUG nova.compute.manager [req-80ee81c0-aaa7-45c5-be0b-122dced45756 req-dcb4c3ba-b8a8-4298-9516-ac05fdfdbd95 service nova] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Refreshing instance network info cache due to event network-changed-c2d4507d-2fd6-466d-9025-685dbebc79f3. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1541.799060] env[62510]: DEBUG oslo_concurrency.lockutils [req-80ee81c0-aaa7-45c5-be0b-122dced45756 req-dcb4c3ba-b8a8-4298-9516-ac05fdfdbd95 service nova] Acquiring lock "refresh_cache-3533a113-6f46-4b18-872d-9bc1b0481969" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1541.799272] env[62510]: DEBUG oslo_concurrency.lockutils [req-80ee81c0-aaa7-45c5-be0b-122dced45756 req-dcb4c3ba-b8a8-4298-9516-ac05fdfdbd95 service nova] Acquired lock "refresh_cache-3533a113-6f46-4b18-872d-9bc1b0481969" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1541.799329] env[62510]: DEBUG nova.network.neutron [req-80ee81c0-aaa7-45c5-be0b-122dced45756 req-dcb4c3ba-b8a8-4298-9516-ac05fdfdbd95 service nova] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Refreshing network info cache for port c2d4507d-2fd6-466d-9025-685dbebc79f3 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1541.950737] env[62510]: DEBUG nova.compute.manager [req-ffe5d64b-8bd3-4bad-9ec9-21a5975956ec req-4ba2ce12-89b2-4216-91ff-be9c34820200 service nova] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Received event network-vif-plugged-3ed074cb-cedf-490b-b36a-d695cbf28633 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1541.951075] env[62510]: DEBUG oslo_concurrency.lockutils [req-ffe5d64b-8bd3-4bad-9ec9-21a5975956ec req-4ba2ce12-89b2-4216-91ff-be9c34820200 service nova] Acquiring lock "9d5d29ea-be92-4881-9fc8-fea3f2f442d0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1541.952297] env[62510]: DEBUG oslo_concurrency.lockutils [req-ffe5d64b-8bd3-4bad-9ec9-21a5975956ec req-4ba2ce12-89b2-4216-91ff-be9c34820200 service nova] Lock "9d5d29ea-be92-4881-9fc8-fea3f2f442d0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1541.952557] env[62510]: DEBUG oslo_concurrency.lockutils [req-ffe5d64b-8bd3-4bad-9ec9-21a5975956ec req-4ba2ce12-89b2-4216-91ff-be9c34820200 service nova] Lock "9d5d29ea-be92-4881-9fc8-fea3f2f442d0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1541.952739] env[62510]: DEBUG nova.compute.manager [req-ffe5d64b-8bd3-4bad-9ec9-21a5975956ec req-4ba2ce12-89b2-4216-91ff-be9c34820200 service nova] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] No waiting events found dispatching network-vif-plugged-3ed074cb-cedf-490b-b36a-d695cbf28633 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1541.952924] env[62510]: WARNING nova.compute.manager [req-ffe5d64b-8bd3-4bad-9ec9-21a5975956ec req-4ba2ce12-89b2-4216-91ff-be9c34820200 service nova] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Received unexpected event network-vif-plugged-3ed074cb-cedf-490b-b36a-d695cbf28633 for instance with vm_state building and task_state spawning. 
[ 1542.020937] env[62510]: DEBUG nova.network.neutron [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Successfully updated port: 3ed074cb-cedf-490b-b36a-d695cbf28633 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1542.044598] env[62510]: DEBUG oslo_vmware.api [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52535c52-cea2-0419-f3f9-4be071041a1f, 'name': SearchDatastore_Task, 'duration_secs': 0.058913} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1542.044598] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1542.044598] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1542.044598] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1542.044826] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1542.045017] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1542.045532] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4400cd7d-c576-4d5e-8a7a-9fbf064a234e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.056924] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1542.057161] env[62510]: DEBUG 
nova.virt.vmwareapi.vmops [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1542.058063] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0911c6ba-15ea-4eec-b63f-f8641861dbab {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.065968] env[62510]: DEBUG oslo_vmware.api [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Waiting for the task: (returnval){ [ 1542.065968] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]524c0ed6-3e0f-5990-5e4f-a02ba2a25f7e" [ 1542.065968] env[62510]: _type = "Task" [ 1542.065968] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.082026] env[62510]: DEBUG oslo_vmware.api [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]524c0ed6-3e0f-5990-5e4f-a02ba2a25f7e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.099802] env[62510]: DEBUG oslo_vmware.api [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1768592, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.605132} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1542.100248] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 83fa0d32-18ee-401d-af0b-a0adb538e5f4/83fa0d32-18ee-401d-af0b-a0adb538e5f4.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1542.100350] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1542.100591] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5c85bba0-5c5f-43bb-9fac-3cc3362bd778 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.113842] env[62510]: DEBUG oslo_vmware.api [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Task: {'id': task-1768593, 'name': ReconfigVM_Task, 'duration_secs': 0.486278} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1542.118687] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Reconfigured VM instance instance-00000028 to attach disk [datastore1] 2c5c38c1-511f-4aae-969a-eb6de128fae7/2c5c38c1-511f-4aae-969a-eb6de128fae7.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1542.118687] env[62510]: DEBUG oslo_vmware.api [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1542.118687] env[62510]: value = "task-1768594" [ 1542.118687] env[62510]: _type = "Task" [ 1542.118687] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.118687] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6adb0dc6-950a-4cb7-900b-c58d22aa4e12 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.128905] env[62510]: DEBUG oslo_vmware.api [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1768594, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.131999] env[62510]: DEBUG oslo_vmware.api [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Waiting for the task: (returnval){ [ 1542.131999] env[62510]: value = "task-1768595" [ 1542.131999] env[62510]: _type = "Task" [ 1542.131999] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.136315] env[62510]: DEBUG nova.network.neutron [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Successfully created port: ee64cf95-cf5c-4b37-9596-cab7a499649d {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1542.149506] env[62510]: DEBUG oslo_vmware.api [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Task: {'id': task-1768595, 'name': Rename_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.150488] env[62510]: DEBUG nova.compute.manager [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Start building block device mappings for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1542.165017] env[62510]: INFO nova.compute.manager [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] instance snapshotting [ 1542.165017] env[62510]: WARNING nova.compute.manager [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 1542.168679] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbd2cf14-6d28-4325-939d-3be81496512c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.192834] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74610b76-f250-4629-acf7-f2bd49f7612b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.524426] env[62510]: DEBUG oslo_concurrency.lockutils [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Acquiring lock "refresh_cache-9d5d29ea-be92-4881-9fc8-fea3f2f442d0" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1542.524685] env[62510]: DEBUG oslo_concurrency.lockutils [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Acquired lock "refresh_cache-9d5d29ea-be92-4881-9fc8-fea3f2f442d0" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1542.525049] env[62510]: DEBUG nova.network.neutron [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1542.591197] env[62510]: DEBUG oslo_vmware.api [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]524c0ed6-3e0f-5990-5e4f-a02ba2a25f7e, 'name': SearchDatastore_Task, 'duration_secs': 0.01808} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1542.592199] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c064ecc3-3060-44ee-8388-13a2c8d891fc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.601054] env[62510]: DEBUG oslo_vmware.api [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Waiting for the task: (returnval){ [ 1542.601054] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52ccfec3-869d-b66a-4298-7b3fce6d0370" [ 1542.601054] env[62510]: _type = "Task" [ 1542.601054] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.611082] env[62510]: DEBUG oslo_vmware.api [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52ccfec3-869d-b66a-4298-7b3fce6d0370, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.630354] env[62510]: DEBUG oslo_vmware.api [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1768594, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.09742} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1542.630654] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1542.631873] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71b3f5e9-f1d5-4882-a9f6-92f2fcb50d8e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.646706] env[62510]: DEBUG oslo_vmware.api [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Task: {'id': task-1768595, 'name': Rename_Task, 'duration_secs': 0.21557} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1542.657089] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1542.671474] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Reconfiguring VM instance instance-00000027 to attach disk [datastore1] 83fa0d32-18ee-401d-af0b-a0adb538e5f4/83fa0d32-18ee-401d-af0b-a0adb538e5f4.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1542.677371] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9aff4a84-22a3-4f7b-a0f1-653eae206bf8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.679110] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-23119caf-8b3e-4ceb-bac3-9db3222920f2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.703172] env[62510]: DEBUG oslo_vmware.api [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1542.703172] env[62510]: value = "task-1768597" [ 1542.703172] env[62510]: _type = "Task" [ 1542.703172] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.704714] env[62510]: DEBUG oslo_vmware.api [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Waiting for the task: (returnval){ [ 1542.704714] env[62510]: value = "task-1768596" [ 1542.704714] env[62510]: _type = "Task" [ 1542.704714] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.715327] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Creating Snapshot of the VM instance {{(pid=62510) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1542.716922] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c14a854a-1027-4cca-a3d9-8b0aa88a830c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.727133] env[62510]: DEBUG oslo_vmware.api [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1768597, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.730825] env[62510]: DEBUG oslo_vmware.api [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Task: {'id': task-1768596, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.736894] env[62510]: DEBUG oslo_vmware.api [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1542.736894] env[62510]: value = "task-1768598" [ 1542.736894] env[62510]: _type = "Task" [ 1542.736894] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.747240] env[62510]: DEBUG oslo_vmware.api [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768598, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.849579] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-795f0668-5ef0-47c1-824c-63fda36de85d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.859358] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e85dbef-4021-4b93-960e-a6ca9c65e9fc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.900351] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f7cb40c-7e88-46d0-9cab-3cc93122604d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.904597] env[62510]: DEBUG nova.network.neutron [req-80ee81c0-aaa7-45c5-be0b-122dced45756 req-dcb4c3ba-b8a8-4298-9516-ac05fdfdbd95 service nova] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Updated VIF entry in instance network info cache for port c2d4507d-2fd6-466d-9025-685dbebc79f3. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1542.904997] env[62510]: DEBUG nova.network.neutron [req-80ee81c0-aaa7-45c5-be0b-122dced45756 req-dcb4c3ba-b8a8-4298-9516-ac05fdfdbd95 service nova] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Updating instance_info_cache with network_info: [{"id": "c2d4507d-2fd6-466d-9025-685dbebc79f3", "address": "fa:16:3e:d4:ed:51", "network": {"id": "37039935-bfed-4317-b0bb-aa8df5f89dbd", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1046853355-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.236", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc4a16c9d5d346489a9c8efec041df23", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e66c4ebe-f808-4b34-bdb5-6c45edb1736f", "external-id": "cl2-zone-719", "segmentation_id": 719, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2d4507d-2f", "ovs_interfaceid": "c2d4507d-2fd6-466d-9025-685dbebc79f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1542.911481] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f83f296d-0bba-406a-9a63-de9ce0a8612a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.931159] env[62510]: DEBUG nova.compute.provider_tree [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1543.088380] env[62510]: DEBUG nova.network.neutron [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1543.115166] env[62510]: DEBUG oslo_vmware.api [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52ccfec3-869d-b66a-4298-7b3fce6d0370, 'name': SearchDatastore_Task, 'duration_secs': 0.015914} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1543.116338] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1543.116672] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 2c5d137d-4fd5-4035-a04f-bdb76e90edd7/2c5d137d-4fd5-4035-a04f-bdb76e90edd7.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1543.116990] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-895a6df0-4517-4c4a-9639-398be4d1f9cf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.125851] env[62510]: DEBUG oslo_vmware.api [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Waiting for the task: (returnval){ [ 1543.125851] env[62510]: value = "task-1768599" [ 1543.125851] env[62510]: _type = "Task" [ 1543.125851] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1543.136320] env[62510]: DEBUG oslo_vmware.api [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Task: {'id': task-1768599, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.178677] env[62510]: DEBUG nova.compute.manager [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1543.218520] env[62510]: DEBUG nova.virt.hardware [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1543.218767] env[62510]: DEBUG nova.virt.hardware [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1543.218927] env[62510]: DEBUG nova.virt.hardware [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1543.219749] env[62510]: DEBUG nova.virt.hardware [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1543.219749] env[62510]: DEBUG nova.virt.hardware [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1543.220038] env[62510]: DEBUG nova.virt.hardware [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1543.220038] env[62510]: DEBUG nova.virt.hardware [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1543.220679] env[62510]: DEBUG nova.virt.hardware [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1543.220679] env[62510]: DEBUG nova.virt.hardware [None req-03a34e98-4935-4b61-afdf-200267bb9b13 
tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1543.221610] env[62510]: DEBUG nova.virt.hardware [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1543.221610] env[62510]: DEBUG nova.virt.hardware [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1543.222587] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef50e88e-2f6d-4d42-964e-26a0bddcc996 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.234678] env[62510]: DEBUG oslo_vmware.api [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1768597, 'name': ReconfigVM_Task, 'duration_secs': 0.369604} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1543.235583] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Reconfigured VM instance instance-00000027 to attach disk [datastore1] 83fa0d32-18ee-401d-af0b-a0adb538e5f4/83fa0d32-18ee-401d-af0b-a0adb538e5f4.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1543.236545] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-28fe6c7f-4d8c-4149-8ef6-af917cd1c926 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.245667] env[62510]: DEBUG oslo_vmware.api [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Task: {'id': task-1768596, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.250613] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92d5a8c3-c096-41f4-9852-a29fab257a6d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.259023] env[62510]: DEBUG oslo_vmware.api [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1543.259023] env[62510]: value = "task-1768600" [ 1543.259023] env[62510]: _type = "Task" [ 1543.259023] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.105869] env[62510]: DEBUG oslo_concurrency.lockutils [req-80ee81c0-aaa7-45c5-be0b-122dced45756 req-dcb4c3ba-b8a8-4298-9516-ac05fdfdbd95 service nova] Releasing lock "refresh_cache-3533a113-6f46-4b18-872d-9bc1b0481969" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1544.106878] env[62510]: DEBUG nova.scheduler.client.report [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1544.112221] env[62510]: DEBUG oslo_vmware.api [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768598, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.112424] env[62510]: WARNING oslo_vmware.common.loopingcall [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] task run outlasted interval by 0.367031 sec [ 1544.115778] env[62510]: DEBUG nova.network.neutron [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Successfully updated port: ee64cf95-cf5c-4b37-9596-cab7a499649d {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1544.127883] env[62510]: DEBUG nova.compute.manager [req-b5c8f7ed-0935-4f99-8a7a-c0b2e72cf390 req-26070fac-b21a-4750-8441-67259ed2d38b service nova] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Received event network-vif-plugged-ee64cf95-cf5c-4b37-9596-cab7a499649d {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1544.128166] env[62510]: DEBUG oslo_concurrency.lockutils [req-b5c8f7ed-0935-4f99-8a7a-c0b2e72cf390 req-26070fac-b21a-4750-8441-67259ed2d38b service nova] Acquiring lock "f9eb5110-28ec-474e-b80e-0bfcee51483d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1544.128360] env[62510]: DEBUG oslo_concurrency.lockutils [req-b5c8f7ed-0935-4f99-8a7a-c0b2e72cf390 req-26070fac-b21a-4750-8441-67259ed2d38b service nova] Lock "f9eb5110-28ec-474e-b80e-0bfcee51483d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1544.131020] env[62510]: DEBUG oslo_concurrency.lockutils [req-b5c8f7ed-0935-4f99-8a7a-c0b2e72cf390 req-26070fac-b21a-4750-8441-67259ed2d38b service nova] Lock "f9eb5110-28ec-474e-b80e-0bfcee51483d-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1544.131020] env[62510]: DEBUG nova.compute.manager [req-b5c8f7ed-0935-4f99-8a7a-c0b2e72cf390 req-26070fac-b21a-4750-8441-67259ed2d38b service nova] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] No waiting events found dispatching network-vif-plugged-ee64cf95-cf5c-4b37-9596-cab7a499649d {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1544.131020] env[62510]: WARNING nova.compute.manager [req-b5c8f7ed-0935-4f99-8a7a-c0b2e72cf390 req-26070fac-b21a-4750-8441-67259ed2d38b service nova] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Received unexpected event network-vif-plugged-ee64cf95-cf5c-4b37-9596-cab7a499649d for instance with vm_state building and task_state spawning. [ 1544.133794] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Acquiring lock "13cdba63-5db4-419f-9e0b-244832d7866b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1544.134044] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Lock "13cdba63-5db4-419f-9e0b-244832d7866b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1544.138162] env[62510]: DEBUG oslo_vmware.api [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1768600, 'name': Rename_Task, 'duration_secs': 0.191072} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.140330] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1544.140330] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f71b0b44-64dd-403e-b040-c5d34599b8a6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.149156] env[62510]: DEBUG oslo_vmware.api [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Task: {'id': task-1768596, 'name': PowerOnVM_Task, 'duration_secs': 0.9285} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.149572] env[62510]: DEBUG oslo_vmware.api [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768598, 'name': CreateSnapshot_Task, 'duration_secs': 1.180081} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.153271] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1544.153552] env[62510]: INFO nova.compute.manager [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Took 9.07 seconds to spawn the instance on the hypervisor. [ 1544.153744] env[62510]: DEBUG nova.compute.manager [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1544.154621] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Created Snapshot of the VM instance {{(pid=62510) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1544.154621] env[62510]: DEBUG oslo_vmware.api [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Task: {'id': task-1768599, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.651777} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.158680] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90871112-1995-45d5-af83-cf4090dba107 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.158982] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b606be2-445f-40ae-acd9-2254d501bab5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.162012] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 2c5d137d-4fd5-4035-a04f-bdb76e90edd7/2c5d137d-4fd5-4035-a04f-bdb76e90edd7.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1544.162245] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1544.163899] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-31c0f192-ae20-49ca-abfb-3cbf75e2e33c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.165954] env[62510]: DEBUG oslo_vmware.api [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1544.165954] env[62510]: value = "task-1768601" [ 1544.165954] env[62510]: _type = "Task" [ 1544.165954] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.183855] env[62510]: DEBUG oslo_vmware.api [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Waiting for the task: (returnval){ [ 1544.183855] env[62510]: value = "task-1768602" [ 1544.183855] env[62510]: _type = "Task" [ 1544.183855] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.184791] env[62510]: DEBUG oslo_vmware.api [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1768601, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.198850] env[62510]: DEBUG oslo_vmware.api [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Task: {'id': task-1768602, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.222269] env[62510]: DEBUG nova.network.neutron [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Updating instance_info_cache with network_info: [{"id": "3ed074cb-cedf-490b-b36a-d695cbf28633", "address": "fa:16:3e:d3:29:f8", "network": {"id": "1a3ffc18-d3cd-4d21-aea2-6c09ec9f79ba", "bridge": "br-int", "label": "tempest-ServersTestJSON-2022155437-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b7605d0022ff45a8a1fb357da78ecc3d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aaf1b231-3660-4453-b4f3-44d825b9a5dd", "external-id": "nsx-vlan-transportzone-6", "segmentation_id": 6, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ed074cb-ce", "ovs_interfaceid": "3ed074cb-cedf-490b-b36a-d695cbf28633", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1544.239149] env[62510]: DEBUG nova.compute.manager [req-2dcaa7e1-489c-47d7-b4bb-d3f04bb04a78 req-3c964e0c-b850-47b2-b131-9278508a426a service nova] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Received event network-changed-3ed074cb-cedf-490b-b36a-d695cbf28633 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1544.239149] env[62510]: DEBUG nova.compute.manager [req-2dcaa7e1-489c-47d7-b4bb-d3f04bb04a78 req-3c964e0c-b850-47b2-b131-9278508a426a service nova] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Refreshing instance network info cache due to event network-changed-3ed074cb-cedf-490b-b36a-d695cbf28633. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1544.239149] env[62510]: DEBUG oslo_concurrency.lockutils [req-2dcaa7e1-489c-47d7-b4bb-d3f04bb04a78 req-3c964e0c-b850-47b2-b131-9278508a426a service nova] Acquiring lock "refresh_cache-9d5d29ea-be92-4881-9fc8-fea3f2f442d0" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1544.618208] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.490s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1544.618782] env[62510]: DEBUG nova.compute.manager [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1544.621673] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.190s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1544.627026] env[62510]: INFO nova.compute.claims [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1544.633604] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Acquiring lock "refresh_cache-f9eb5110-28ec-474e-b80e-0bfcee51483d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1544.633604] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Acquired lock "refresh_cache-f9eb5110-28ec-474e-b80e-0bfcee51483d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1544.633604] env[62510]: DEBUG nova.network.neutron [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1544.684711] env[62510]: DEBUG oslo_vmware.api [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1768601, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.705781] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Creating linked-clone VM from snapshot {{(pid=62510) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1544.710075] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-400d9f82-2b76-4559-8704-ad9038c5e638 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.718577] env[62510]: INFO nova.compute.manager [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Took 47.33 seconds to build instance. 
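The repeated "Waiting for the task ... / progress is N% / completed successfully" triplets in this trace come from oslo.vmware's task polling (the wait_for_task and _poll_task frames at oslo_vmware/api.py:397 and :434 cited throughout). Below is a minimal, self-contained Python sketch of that loop's shape only; FakeTask, its poll() method, and the interval are illustrative stand-ins and not oslo.vmware's real classes or signatures.

import time

class FakeTask:
    """Stand-in for a vSphere task; the real poller reads TaskInfo via the
    PropertyCollector.RetrievePropertiesEx calls visible in the log above."""
    def __init__(self, name, steps=3):
        self.name = name
        self._progress = 0
        self._steps = steps

    def poll(self):
        # Each poll advances the fake task; real tasks report queued/running
        # (with a progress percentage) and then success or error.
        self._progress = min(100, self._progress + 100 // self._steps)
        state = "success" if self._progress >= 100 else "running"
        return {"state": state, "progress": self._progress}

def wait_for_task(task, poll_interval=0.5):
    """Shape of the wait_for_task/_poll_task pattern seen in this trace:
    log progress on every poll, return on success, raise on error."""
    while True:
        info = task.poll()
        if info["state"] == "running":
            print(f"Task {task.name} progress is {info['progress']}%.")
        elif info["state"] == "success":
            print(f"Task {task.name} completed successfully.")
            return info
        else:
            raise RuntimeError(f"Task {task.name} failed: {info}")
        time.sleep(poll_interval)

if __name__ == "__main__":
    wait_for_task(FakeTask("task-1768601"), poll_interval=0.1)

Run as-is, this prints a progress line per poll and a completion line, mirroring the PowerOnVM_Task/CloneVM_Task progress entries surrounding it; the actual implementation lives in the oslo_vmware/api.py paths quoted in the log.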
[ 1544.726605] env[62510]: DEBUG oslo_concurrency.lockutils [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Releasing lock "refresh_cache-9d5d29ea-be92-4881-9fc8-fea3f2f442d0" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1544.726605] env[62510]: DEBUG nova.compute.manager [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Instance network_info: |[{"id": "3ed074cb-cedf-490b-b36a-d695cbf28633", "address": "fa:16:3e:d3:29:f8", "network": {"id": "1a3ffc18-d3cd-4d21-aea2-6c09ec9f79ba", "bridge": "br-int", "label": "tempest-ServersTestJSON-2022155437-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b7605d0022ff45a8a1fb357da78ecc3d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aaf1b231-3660-4453-b4f3-44d825b9a5dd", "external-id": "nsx-vlan-transportzone-6", "segmentation_id": 6, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ed074cb-ce", "ovs_interfaceid": "3ed074cb-cedf-490b-b36a-d695cbf28633", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1544.726998] env[62510]: DEBUG oslo_vmware.api [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Task: {'id': task-1768602, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076914} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.729638] env[62510]: DEBUG oslo_concurrency.lockutils [req-2dcaa7e1-489c-47d7-b4bb-d3f04bb04a78 req-3c964e0c-b850-47b2-b131-9278508a426a service nova] Acquired lock "refresh_cache-9d5d29ea-be92-4881-9fc8-fea3f2f442d0" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1544.729638] env[62510]: DEBUG nova.network.neutron [req-2dcaa7e1-489c-47d7-b4bb-d3f04bb04a78 req-3c964e0c-b850-47b2-b131-9278508a426a service nova] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Refreshing network info cache for port 3ed074cb-cedf-490b-b36a-d695cbf28633 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1544.730023] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d3:29:f8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aaf1b231-3660-4453-b4f3-44d825b9a5dd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3ed074cb-cedf-490b-b36a-d695cbf28633', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1544.738626] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Creating folder: Project (b7605d0022ff45a8a1fb357da78ecc3d). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1544.739249] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1544.739338] env[62510]: DEBUG oslo_vmware.api [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1544.739338] env[62510]: value = "task-1768603" [ 1544.739338] env[62510]: _type = "Task" [ 1544.739338] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.742955] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1367a1d9-f378-4759-90fd-0dddfa879a79 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.746768] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd0e6bf2-709d-445f-ac9a-7a42cd8dc005 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.765410] env[62510]: DEBUG oslo_vmware.api [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768603, 'name': CloneVM_Task} progress is 11%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.785754] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Reconfiguring VM instance instance-00000029 to attach disk [datastore1] 2c5d137d-4fd5-4035-a04f-bdb76e90edd7/2c5d137d-4fd5-4035-a04f-bdb76e90edd7.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1544.786079] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Created folder: Project (b7605d0022ff45a8a1fb357da78ecc3d) in parent group-v367197. [ 1544.786258] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Creating folder: Instances. Parent ref: group-v367313. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1544.786482] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f6a12546-10ac-4f70-b788-513930afe4e9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.802650] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3e740f31-13b6-410d-987b-d9b11922a1b4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.815443] env[62510]: DEBUG oslo_vmware.api [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Waiting for the task: (returnval){ [ 1544.815443] env[62510]: value = "task-1768606" [ 1544.815443] env[62510]: _type = "Task" [ 1544.815443] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.819959] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Created folder: Instances in parent group-v367313. [ 1544.820327] env[62510]: DEBUG oslo.service.loopingcall [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1544.820541] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1544.820752] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d5b1cf06-7441-41f4-b986-23fa943292d8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.841046] env[62510]: DEBUG oslo_vmware.api [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Task: {'id': task-1768606, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.851098] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1544.851098] env[62510]: value = "task-1768607" [ 1544.851098] env[62510]: _type = "Task" [ 1544.851098] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.864337] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768607, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.986201] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Acquiring lock "641628d1-bb6d-4207-89b9-98014328e028" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1544.986201] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Lock "641628d1-bb6d-4207-89b9-98014328e028" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1545.136046] env[62510]: DEBUG nova.compute.utils [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1545.137756] env[62510]: DEBUG nova.compute.manager [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1545.138147] env[62510]: DEBUG nova.network.neutron [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1545.186265] env[62510]: DEBUG oslo_vmware.api [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1768601, 'name': PowerOnVM_Task, 'duration_secs': 0.555031} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1545.186669] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1545.186949] env[62510]: INFO nova.compute.manager [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Took 12.92 seconds to spawn the instance on the hypervisor. [ 1545.187415] env[62510]: DEBUG nova.compute.manager [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1545.188323] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8a7fceb-851c-4b67-8817-7aaab643861b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.220146] env[62510]: DEBUG nova.network.neutron [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Instance cache missing network info. 
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1545.222468] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cf77c5cc-16c0-46d3-b6f5-39077837e8ba tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Lock "2c5c38c1-511f-4aae-969a-eb6de128fae7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 99.578s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1545.224040] env[62510]: DEBUG nova.policy [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c2897473b1e44271b67789290b3477c8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd33701c4eedd47268e1c8d16bd63de81', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1545.267603] env[62510]: DEBUG oslo_vmware.api [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768603, 'name': CloneVM_Task} progress is 94%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.271978] env[62510]: DEBUG nova.network.neutron [req-2dcaa7e1-489c-47d7-b4bb-d3f04bb04a78 req-3c964e0c-b850-47b2-b131-9278508a426a service nova] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Updated VIF entry in instance network info cache for port 3ed074cb-cedf-490b-b36a-d695cbf28633. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1545.272371] env[62510]: DEBUG nova.network.neutron [req-2dcaa7e1-489c-47d7-b4bb-d3f04bb04a78 req-3c964e0c-b850-47b2-b131-9278508a426a service nova] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Updating instance_info_cache with network_info: [{"id": "3ed074cb-cedf-490b-b36a-d695cbf28633", "address": "fa:16:3e:d3:29:f8", "network": {"id": "1a3ffc18-d3cd-4d21-aea2-6c09ec9f79ba", "bridge": "br-int", "label": "tempest-ServersTestJSON-2022155437-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b7605d0022ff45a8a1fb357da78ecc3d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aaf1b231-3660-4453-b4f3-44d825b9a5dd", "external-id": "nsx-vlan-transportzone-6", "segmentation_id": 6, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ed074cb-ce", "ovs_interfaceid": "3ed074cb-cedf-490b-b36a-d695cbf28633", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1545.330876] env[62510]: DEBUG oslo_vmware.api [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Task: {'id': task-1768606, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.365679] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768607, 'name': CreateVM_Task, 'duration_secs': 0.491513} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1545.365928] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1545.366782] env[62510]: DEBUG oslo_concurrency.lockutils [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1545.366941] env[62510]: DEBUG oslo_concurrency.lockutils [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1545.367378] env[62510]: DEBUG oslo_concurrency.lockutils [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1545.367749] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b41bb41-ee53-49a9-9c6a-1be4539cb376 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.375117] env[62510]: DEBUG oslo_vmware.api [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Waiting for the task: (returnval){ [ 1545.375117] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52dcacf3-9edf-cb3b-b760-3f902027944e" [ 1545.375117] env[62510]: _type = "Task" [ 1545.375117] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.388470] env[62510]: DEBUG oslo_vmware.api [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52dcacf3-9edf-cb3b-b760-3f902027944e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.638635] env[62510]: DEBUG nova.compute.manager [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1545.716116] env[62510]: INFO nova.compute.manager [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Took 49.63 seconds to build instance. 
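The instance_info_cache payloads logged above (for ports 3ed074cb-cedf-490b-b36a-d695cbf28633 and ee64cf95-cf5c-4b37-9596-cab7a499649d) are lists of VIF dicts carrying the port id, the MAC in "address", and fixed IPs nested under "network" -> "subnets" -> "ips". A short sketch of reading those fields back out of such a structure follows; the helper name is illustrative rather than Nova's API, and the sample data is trimmed from the cache update entry above.

def summarize_vifs(network_info):
    """Return (port_id, mac, [fixed ips]) tuples from a Nova-style
    network_info list like the instance_info_cache payloads in this log."""
    summary = []
    for vif in network_info:
        ips = [ip["address"]
               for subnet in vif["network"]["subnets"]
               for ip in subnet["ips"]]
        summary.append((vif["id"], vif["address"], ips))
    return summary

# Sample trimmed from the cache update for instance 9d5d29ea-be92-4881-9fc8-fea3f2f442d0.
sample = [{
    "id": "3ed074cb-cedf-490b-b36a-d695cbf28633",
    "address": "fa:16:3e:d3:29:f8",
    "network": {
        "id": "1a3ffc18-d3cd-4d21-aea2-6c09ec9f79ba",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.5", "type": "fixed"}],
        }],
    },
}]

print(summarize_vifs(sample))
# [('3ed074cb-cedf-490b-b36a-d695cbf28633', 'fa:16:3e:d3:29:f8', ['192.168.128.5'])]

The same keys ("id", "address", "network", "subnets", "ips") appear verbatim in the Updating/Updated instance_info_cache entries in this section, which is all the sketch relies on.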
[ 1545.730404] env[62510]: DEBUG nova.compute.manager [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1545.776717] env[62510]: DEBUG oslo_concurrency.lockutils [req-2dcaa7e1-489c-47d7-b4bb-d3f04bb04a78 req-3c964e0c-b850-47b2-b131-9278508a426a service nova] Releasing lock "refresh_cache-9d5d29ea-be92-4881-9fc8-fea3f2f442d0" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1545.777316] env[62510]: DEBUG oslo_vmware.api [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768603, 'name': CloneVM_Task} progress is 94%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.837446] env[62510]: DEBUG oslo_vmware.api [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Task: {'id': task-1768606, 'name': ReconfigVM_Task, 'duration_secs': 0.813921} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1545.837731] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Reconfigured VM instance instance-00000029 to attach disk [datastore1] 2c5d137d-4fd5-4035-a04f-bdb76e90edd7/2c5d137d-4fd5-4035-a04f-bdb76e90edd7.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1545.838664] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0a21569d-68bb-46da-85d4-9e4976f740e1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.841603] env[62510]: DEBUG nova.network.neutron [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Updating instance_info_cache with network_info: [{"id": "ee64cf95-cf5c-4b37-9596-cab7a499649d", "address": "fa:16:3e:d6:0d:aa", "network": {"id": "bcfed18a-426f-404e-9042-4dc7775c4c2f", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-50435281-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4696f6ba16a84f3fa4c0daaf08d40fc7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "255460d5-71d4-4bfd-87f1-acc10085db7f", "external-id": "nsx-vlan-transportzone-152", "segmentation_id": 152, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee64cf95-cf", "ovs_interfaceid": "ee64cf95-cf5c-4b37-9596-cab7a499649d", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1545.855449] env[62510]: DEBUG oslo_vmware.api [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Waiting for the task: (returnval){ [ 1545.855449] env[62510]: value = "task-1768608" [ 1545.855449] env[62510]: _type = "Task" [ 1545.855449] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.871443] env[62510]: DEBUG oslo_vmware.api [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Task: {'id': task-1768608, 'name': Rename_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.890503] env[62510]: DEBUG oslo_vmware.api [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52dcacf3-9edf-cb3b-b760-3f902027944e, 'name': SearchDatastore_Task, 'duration_secs': 0.014894} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1545.891125] env[62510]: DEBUG oslo_concurrency.lockutils [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1545.891340] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1545.892324] env[62510]: DEBUG oslo_concurrency.lockutils [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1545.892324] env[62510]: DEBUG oslo_concurrency.lockutils [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1545.892324] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1545.899435] env[62510]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d30952e9-0e10-43ac-9009-d4f82523a75c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.910456] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1545.910669] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1545.917020] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28bbf750-a344-4cea-96f2-7fb9cf3ef4b8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.924797] env[62510]: DEBUG oslo_vmware.api [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Waiting for the task: (returnval){ [ 1545.924797] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]526439a0-3856-6fec-5b4b-f682ce97b17a" [ 1545.924797] env[62510]: _type = "Task" [ 1545.924797] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.932495] env[62510]: DEBUG oslo_vmware.api [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]526439a0-3856-6fec-5b4b-f682ce97b17a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.023044] env[62510]: DEBUG nova.network.neutron [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Successfully created port: 667e6924-4cc9-4a1a-b451-453ac0491b41 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1546.219600] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8bd0a47a-21fb-4c12-8bb9-6e2980feb42f tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lock "83fa0d32-18ee-401d-af0b-a0adb538e5f4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 101.453s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1546.265530] env[62510]: DEBUG oslo_concurrency.lockutils [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1546.269357] env[62510]: DEBUG oslo_vmware.api [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768603, 'name': CloneVM_Task} progress is 95%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.283413] env[62510]: DEBUG nova.compute.manager [req-46e4490a-466e-485e-bd27-200dbcadf6c8 req-9e59be2f-c06f-4735-93e9-dafe175dd510 service nova] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Received event network-changed-ee64cf95-cf5c-4b37-9596-cab7a499649d {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1546.283894] env[62510]: DEBUG nova.compute.manager [req-46e4490a-466e-485e-bd27-200dbcadf6c8 req-9e59be2f-c06f-4735-93e9-dafe175dd510 service nova] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Refreshing instance network info cache due to event network-changed-ee64cf95-cf5c-4b37-9596-cab7a499649d. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1546.283894] env[62510]: DEBUG oslo_concurrency.lockutils [req-46e4490a-466e-485e-bd27-200dbcadf6c8 req-9e59be2f-c06f-4735-93e9-dafe175dd510 service nova] Acquiring lock "refresh_cache-f9eb5110-28ec-474e-b80e-0bfcee51483d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1546.346590] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Releasing lock "refresh_cache-f9eb5110-28ec-474e-b80e-0bfcee51483d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1546.347245] env[62510]: DEBUG nova.compute.manager [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Instance network_info: |[{"id": "ee64cf95-cf5c-4b37-9596-cab7a499649d", "address": "fa:16:3e:d6:0d:aa", "network": {"id": "bcfed18a-426f-404e-9042-4dc7775c4c2f", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-50435281-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4696f6ba16a84f3fa4c0daaf08d40fc7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "255460d5-71d4-4bfd-87f1-acc10085db7f", "external-id": "nsx-vlan-transportzone-152", "segmentation_id": 152, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee64cf95-cf", "ovs_interfaceid": "ee64cf95-cf5c-4b37-9596-cab7a499649d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1546.347245] env[62510]: DEBUG oslo_concurrency.lockutils [req-46e4490a-466e-485e-bd27-200dbcadf6c8 req-9e59be2f-c06f-4735-93e9-dafe175dd510 service nova] Acquired lock "refresh_cache-f9eb5110-28ec-474e-b80e-0bfcee51483d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1546.347539] env[62510]: DEBUG nova.network.neutron [req-46e4490a-466e-485e-bd27-200dbcadf6c8 req-9e59be2f-c06f-4735-93e9-dafe175dd510 service nova] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Refreshing network info cache for port ee64cf95-cf5c-4b37-9596-cab7a499649d {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1546.348719] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d6:0d:aa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '255460d5-71d4-4bfd-87f1-acc10085db7f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ee64cf95-cf5c-4b37-9596-cab7a499649d', 
'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1546.365298] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Creating folder: Project (4696f6ba16a84f3fa4c0daaf08d40fc7). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1546.365424] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-35cfda7a-2d07-4b8e-970d-5712d545de39 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.379481] env[62510]: DEBUG oslo_vmware.api [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Task: {'id': task-1768608, 'name': Rename_Task, 'duration_secs': 0.335915} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.380151] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1546.380899] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fa4dad49-61ba-4bd3-847c-bbb48835dfea {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.385400] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90820fd7-d355-4502-9ad2-5989f7c5f6da {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.390283] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Created folder: Project (4696f6ba16a84f3fa4c0daaf08d40fc7) in parent group-v367197. [ 1546.391398] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Creating folder: Instances. Parent ref: group-v367317. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1546.391398] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9d84bfa0-538d-4ea6-bf94-5d64175de3af {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.400050] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9110efae-9e10-488d-98ee-f7075fb4aa9f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.402867] env[62510]: DEBUG oslo_vmware.api [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Waiting for the task: (returnval){ [ 1546.402867] env[62510]: value = "task-1768610" [ 1546.402867] env[62510]: _type = "Task" [ 1546.402867] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.441594] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d785e5e-96e1-4f8d-a2f3-30152ee0ec0f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.444417] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Created folder: Instances in parent group-v367317. [ 1546.444682] env[62510]: DEBUG oslo.service.loopingcall [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1546.444880] env[62510]: DEBUG oslo_vmware.api [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Task: {'id': task-1768610, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.445573] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1546.446637] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-18e458d3-fbaa-4103-abe4-1034320b32b6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.475458] env[62510]: DEBUG oslo_vmware.api [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]526439a0-3856-6fec-5b4b-f682ce97b17a, 'name': SearchDatastore_Task, 'duration_secs': 0.019191} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.476376] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff92c8c0-9c69-49e5-bb9f-520c77f4aad0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.483232] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3da36aae-d46a-42e3-b0c2-28f214d81d2d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.489273] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1546.489273] env[62510]: value = "task-1768612" [ 1546.489273] env[62510]: _type = "Task" [ 1546.489273] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.503056] env[62510]: DEBUG nova.compute.provider_tree [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1546.506958] env[62510]: DEBUG oslo_vmware.api [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Waiting for the task: (returnval){ [ 1546.506958] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]527ab0df-5993-5381-9a00-7013ea890ca9" [ 1546.506958] env[62510]: _type = "Task" [ 1546.506958] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.506958] env[62510]: DEBUG nova.network.neutron [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Successfully created port: fc653bf1-54d2-4e03-bb9f-b6486dd8ce76 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1546.517249] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768612, 'name': CreateVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.524471] env[62510]: DEBUG oslo_vmware.api [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]527ab0df-5993-5381-9a00-7013ea890ca9, 'name': SearchDatastore_Task, 'duration_secs': 0.015823} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.524740] env[62510]: DEBUG oslo_concurrency.lockutils [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1546.525037] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 9d5d29ea-be92-4881-9fc8-fea3f2f442d0/9d5d29ea-be92-4881-9fc8-fea3f2f442d0.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1546.525382] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e5246e1a-40db-4bd0-b214-6a83b2f423c0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.542924] env[62510]: DEBUG oslo_vmware.api [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Waiting for the task: (returnval){ [ 1546.542924] env[62510]: value = "task-1768613" [ 1546.542924] env[62510]: _type = "Task" [ 1546.542924] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.553127] env[62510]: DEBUG oslo_vmware.api [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Task: {'id': task-1768613, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.655381] env[62510]: DEBUG nova.compute.manager [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1546.691145] env[62510]: DEBUG nova.virt.hardware [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1546.691409] env[62510]: DEBUG nova.virt.hardware [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1546.691545] env[62510]: DEBUG nova.virt.hardware [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1546.691748] env[62510]: DEBUG nova.virt.hardware [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1546.691868] env[62510]: DEBUG nova.virt.hardware [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1546.692195] env[62510]: DEBUG nova.virt.hardware [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1546.692525] env[62510]: DEBUG nova.virt.hardware [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1546.692828] env[62510]: DEBUG nova.virt.hardware [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1546.692906] env[62510]: DEBUG nova.virt.hardware [None 
req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1546.693098] env[62510]: DEBUG nova.virt.hardware [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1546.693303] env[62510]: DEBUG nova.virt.hardware [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1546.694557] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7946d35a-2e37-4507-9bb3-4e3213085e35 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.705265] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc7f4f4e-e51d-4cf5-8fa3-4d9f18f8554a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.723765] env[62510]: DEBUG nova.compute.manager [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1546.770951] env[62510]: DEBUG oslo_vmware.api [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768603, 'name': CloneVM_Task, 'duration_secs': 1.678858} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.771448] env[62510]: INFO nova.virt.vmwareapi.vmops [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Created linked-clone VM from snapshot [ 1546.772306] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd69ecbf-1f73-44df-9fbc-87f3d5f0204e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.782664] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Uploading image 30927c13-4037-47aa-8a27-1eeac883a406 {{(pid=62510) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1546.815614] env[62510]: DEBUG oslo_vmware.rw_handles [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1546.815614] env[62510]: value = "vm-367314" [ 1546.815614] env[62510]: _type = "VirtualMachine" [ 1546.815614] env[62510]: }. 
{{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1546.816408] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-4d53419e-28ef-4621-8e48-d568f3605f16 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.827953] env[62510]: DEBUG oslo_vmware.rw_handles [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lease: (returnval){ [ 1546.827953] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]525087d6-961e-2d69-e988-7b653e90875a" [ 1546.827953] env[62510]: _type = "HttpNfcLease" [ 1546.827953] env[62510]: } obtained for exporting VM: (result){ [ 1546.827953] env[62510]: value = "vm-367314" [ 1546.827953] env[62510]: _type = "VirtualMachine" [ 1546.827953] env[62510]: }. {{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1546.828321] env[62510]: DEBUG oslo_vmware.api [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the lease: (returnval){ [ 1546.828321] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]525087d6-961e-2d69-e988-7b653e90875a" [ 1546.828321] env[62510]: _type = "HttpNfcLease" [ 1546.828321] env[62510]: } to be ready. {{(pid=62510) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1546.837446] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1546.837446] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]525087d6-961e-2d69-e988-7b653e90875a" [ 1546.837446] env[62510]: _type = "HttpNfcLease" [ 1546.837446] env[62510]: } is initializing. {{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1546.915032] env[62510]: DEBUG oslo_vmware.api [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Task: {'id': task-1768610, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.000887] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768612, 'name': CreateVM_Task} progress is 99%. 
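Both waits in the entries above (the CopyVirtualDisk_Task polled at 0%/51% and the HttpNfcLease moving from "initializing" to "ready") follow the same oslo.vmware pattern: invoke the vSphere call through the API session, then block on the session's poller. A minimal sketch of that pattern, assuming an already-configured oslo_vmware.api.VMwareAPISession and a VirtualMachine moref obtained elsewhere (both are assumptions for illustration, not values from this log):

    def power_on_and_export(session, vm_ref):
        """Block on a vSphere task, then on an NFC export lease.

        session: an oslo_vmware.api.VMwareAPISession; vm_ref: a VirtualMachine
        managed-object reference. Both are assumed to exist already.
        """
        # Server-side task: start it, then let oslo.vmware poll it to completion.
        # This is what drives the "Task: {...} progress is N%" lines seen above.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        session.wait_for_task(task)

        # NFC export: ExportVm returns an HttpNfcLease that must reach the
        # "ready" state before the VMDK URLs in its info can be read.
        lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)
        session.wait_for_lease_ready(lease)
        return lease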
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.009309] env[62510]: DEBUG nova.scheduler.client.report [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1547.057472] env[62510]: DEBUG oslo_vmware.api [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Task: {'id': task-1768613, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.254259] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1547.339402] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1547.339402] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]525087d6-961e-2d69-e988-7b653e90875a" [ 1547.339402] env[62510]: _type = "HttpNfcLease" [ 1547.339402] env[62510]: } is ready. {{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1547.339718] env[62510]: DEBUG oslo_vmware.rw_handles [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1547.339718] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]525087d6-961e-2d69-e988-7b653e90875a" [ 1547.339718] env[62510]: _type = "HttpNfcLease" [ 1547.339718] env[62510]: }. {{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1547.340527] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07856492-a596-4d6e-9ee4-34035286fe5c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.349785] env[62510]: DEBUG oslo_vmware.rw_handles [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526c72ab-2d1b-af7d-b297-eb98ec9cac4e/disk-0.vmdk from lease info. 
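The "Inventory has not changed" entry above reports raw provider capacity along with reserved amounts and allocation ratios; the capacity the scheduler can actually consume is, per resource class, (total - reserved) * allocation_ratio. A small worked example with the figures from that entry (the formula is the usual placement capacity rule, restated here as an assumption rather than quoted from the code):

    # Effective capacity per resource class: (total - reserved) * allocation_ratio.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0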
{{(pid=62510) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1547.349864] env[62510]: DEBUG oslo_vmware.rw_handles [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526c72ab-2d1b-af7d-b297-eb98ec9cac4e/disk-0.vmdk for reading. {{(pid=62510) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1547.434628] env[62510]: DEBUG oslo_vmware.api [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Task: {'id': task-1768610, 'name': PowerOnVM_Task, 'duration_secs': 0.895215} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.434929] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1547.435167] env[62510]: INFO nova.compute.manager [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Took 9.67 seconds to spawn the instance on the hypervisor. [ 1547.435373] env[62510]: DEBUG nova.compute.manager [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1547.436278] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7be6ca0-27ef-498a-bd90-e45c214eeb3c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.448371] env[62510]: DEBUG oslo_vmware.rw_handles [None req-30362c47-ddbb-4616-b105-bbfb04080fb6 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525f242d-9e9f-fc25-268f-6d603a755453/disk-0.vmdk. {{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1547.449370] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bac239e-4949-4d60-9a44-def94ba2681e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.456811] env[62510]: DEBUG oslo_vmware.rw_handles [None req-30362c47-ddbb-4616-b105-bbfb04080fb6 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525f242d-9e9f-fc25-268f-6d603a755453/disk-0.vmdk is in state: ready. 
{{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1547.456999] env[62510]: ERROR oslo_vmware.rw_handles [None req-30362c47-ddbb-4616-b105-bbfb04080fb6 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525f242d-9e9f-fc25-268f-6d603a755453/disk-0.vmdk due to incomplete transfer. [ 1547.457275] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-be130930-a13b-4e3e-954e-a5b9e8270e00 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.466677] env[62510]: DEBUG oslo_vmware.rw_handles [None req-30362c47-ddbb-4616-b105-bbfb04080fb6 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525f242d-9e9f-fc25-268f-6d603a755453/disk-0.vmdk. {{(pid=62510) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1547.466959] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-30362c47-ddbb-4616-b105-bbfb04080fb6 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Uploaded image 6b6ee3c5-953e-431d-bb84-94a3771138f0 to the Glance image server {{(pid=62510) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1547.470349] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-30362c47-ddbb-4616-b105-bbfb04080fb6 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Destroying the VM {{(pid=62510) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1547.470623] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-4d43ab8b-b0d3-42f3-8f5e-0fa0a1cae5f9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.478841] env[62510]: DEBUG oslo_vmware.api [None req-30362c47-ddbb-4616-b105-bbfb04080fb6 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Waiting for the task: (returnval){ [ 1547.478841] env[62510]: value = "task-1768615" [ 1547.478841] env[62510]: _type = "Task" [ 1547.478841] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.489035] env[62510]: DEBUG oslo_vmware.api [None req-30362c47-ddbb-4616-b105-bbfb04080fb6 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768615, 'name': Destroy_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.506058] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768612, 'name': CreateVM_Task, 'duration_secs': 0.675257} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.506058] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1547.506058] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1547.506058] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1547.506058] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1547.506058] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b8b5bc9-5b82-4a86-aec9-c1903e6cac96 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.511022] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-734bea03-9b5f-4f55-8885-47525fd2e179 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.517411] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.895s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1547.518411] env[62510]: DEBUG nova.compute.manager [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1547.521699] env[62510]: DEBUG oslo_vmware.api [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Waiting for the task: (returnval){ [ 1547.521699] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52359ffc-3ad7-d0f8-db7f-d92629d1334b" [ 1547.521699] env[62510]: _type = "Task" [ 1547.521699] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.522447] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2e6a41e1-595f-4d8f-898f-cb3b89fa2925 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.225s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1547.522696] env[62510]: DEBUG nova.objects.instance [None req-2e6a41e1-595f-4d8f-898f-cb3b89fa2925 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lazy-loading 'resources' on Instance uuid e9711202-67f3-4fe2-befb-f28722ddea33 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1547.532150] env[62510]: DEBUG nova.network.neutron [req-46e4490a-466e-485e-bd27-200dbcadf6c8 req-9e59be2f-c06f-4735-93e9-dafe175dd510 service nova] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Updated VIF entry in instance network info cache for port ee64cf95-cf5c-4b37-9596-cab7a499649d. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1547.532150] env[62510]: DEBUG nova.network.neutron [req-46e4490a-466e-485e-bd27-200dbcadf6c8 req-9e59be2f-c06f-4735-93e9-dafe175dd510 service nova] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Updating instance_info_cache with network_info: [{"id": "ee64cf95-cf5c-4b37-9596-cab7a499649d", "address": "fa:16:3e:d6:0d:aa", "network": {"id": "bcfed18a-426f-404e-9042-4dc7775c4c2f", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-50435281-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4696f6ba16a84f3fa4c0daaf08d40fc7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "255460d5-71d4-4bfd-87f1-acc10085db7f", "external-id": "nsx-vlan-transportzone-152", "segmentation_id": 152, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee64cf95-cf", "ovs_interfaceid": "ee64cf95-cf5c-4b37-9596-cab7a499649d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1547.535432] env[62510]: DEBUG oslo_vmware.api [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52359ffc-3ad7-d0f8-db7f-d92629d1334b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.558855] env[62510]: DEBUG oslo_vmware.api [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Task: {'id': task-1768613, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.648094} completed successfully. 
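The instance_info_cache payload logged above is plain JSON, so the addressing details buried in it (port ID, MAC, fixed and floating IPs, MTU) can be pulled out directly. A stdlib-only sketch, assuming the bracketed network_info list has been captured into a string named raw (a hypothetical variable for illustration, not something nova hands out in this form):

    import json

    def summarize_network_info(raw):
        """Extract per-VIF addressing from a cached network_info JSON list."""
        for vif in json.loads(raw):
            fixed, floating = [], []
            for subnet in vif['network']['subnets']:
                for ip in subnet['ips']:
                    fixed.append(ip['address'])
                    floating.extend(f['address'] for f in ip.get('floating_ips', []))
            yield {
                'port_id': vif['id'],
                'mac': vif['address'],
                'mtu': vif['network']['meta'].get('mtu'),
                'fixed_ips': fixed,
                'floating_ips': floating,
            }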
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.563534] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 9d5d29ea-be92-4881-9fc8-fea3f2f442d0/9d5d29ea-be92-4881-9fc8-fea3f2f442d0.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1547.563766] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1547.564491] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-78296e36-18ce-4649-abbf-1f9dddee6ea0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.573884] env[62510]: DEBUG oslo_vmware.api [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Waiting for the task: (returnval){ [ 1547.573884] env[62510]: value = "task-1768616" [ 1547.573884] env[62510]: _type = "Task" [ 1547.573884] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.587063] env[62510]: DEBUG oslo_vmware.api [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Task: {'id': task-1768616, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.590789] env[62510]: DEBUG nova.compute.manager [req-75ec7194-0f7f-48da-a1ef-c2bcea5b831f req-d31d8901-e0b2-4707-9a0d-2872a66fe11b service nova] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Received event network-changed-d6ee81d1-3abc-4d5e-a8ca-658407cbd553 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1547.591083] env[62510]: DEBUG nova.compute.manager [req-75ec7194-0f7f-48da-a1ef-c2bcea5b831f req-d31d8901-e0b2-4707-9a0d-2872a66fe11b service nova] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Refreshing instance network info cache due to event network-changed-d6ee81d1-3abc-4d5e-a8ca-658407cbd553. 
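The ExtendVirtualDisk_Task just above grows the freshly copied root disk to 1048576, which is the m1.nano flavor's root_gb=1 expressed in KiB; the assumption here is that the vmwareapi driver passes disk sizes to ExtendVirtualDisk in KiB, which is consistent with the number logged:

    # root_gb from the flavor, converted to the KiB figure seen in the log.
    root_gb = 1
    size_kb = root_gb * 1024 * 1024   # 1 GiB -> 1048576 KiB
    assert size_kb == 1048576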
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1547.591363] env[62510]: DEBUG oslo_concurrency.lockutils [req-75ec7194-0f7f-48da-a1ef-c2bcea5b831f req-d31d8901-e0b2-4707-9a0d-2872a66fe11b service nova] Acquiring lock "refresh_cache-83fa0d32-18ee-401d-af0b-a0adb538e5f4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1547.591526] env[62510]: DEBUG oslo_concurrency.lockutils [req-75ec7194-0f7f-48da-a1ef-c2bcea5b831f req-d31d8901-e0b2-4707-9a0d-2872a66fe11b service nova] Acquired lock "refresh_cache-83fa0d32-18ee-401d-af0b-a0adb538e5f4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1547.591685] env[62510]: DEBUG nova.network.neutron [req-75ec7194-0f7f-48da-a1ef-c2bcea5b831f req-d31d8901-e0b2-4707-9a0d-2872a66fe11b service nova] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Refreshing network info cache for port d6ee81d1-3abc-4d5e-a8ca-658407cbd553 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1547.962027] env[62510]: INFO nova.compute.manager [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Took 44.25 seconds to build instance. [ 1547.995473] env[62510]: DEBUG oslo_vmware.api [None req-30362c47-ddbb-4616-b105-bbfb04080fb6 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768615, 'name': Destroy_Task} progress is 33%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.026014] env[62510]: DEBUG nova.compute.utils [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1548.033959] env[62510]: DEBUG nova.compute.manager [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1548.034299] env[62510]: DEBUG nova.network.neutron [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1548.040117] env[62510]: DEBUG oslo_concurrency.lockutils [req-46e4490a-466e-485e-bd27-200dbcadf6c8 req-9e59be2f-c06f-4735-93e9-dafe175dd510 service nova] Releasing lock "refresh_cache-f9eb5110-28ec-474e-b80e-0bfcee51483d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1548.051698] env[62510]: DEBUG oslo_vmware.api [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52359ffc-3ad7-d0f8-db7f-d92629d1334b, 'name': SearchDatastore_Task, 'duration_secs': 0.020685} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.051698] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1548.051698] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1548.052125] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1548.052192] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1548.052449] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1548.052605] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-86485912-de6c-4442-9dc7-501f81529a5d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.063846] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1548.064063] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1548.065234] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-269a4ce0-1862-418e-ab36-dcca47c470a4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.084816] env[62510]: DEBUG oslo_vmware.api [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Waiting for the task: (returnval){ [ 1548.084816] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52235de0-d90a-6738-87b7-25f65b51ad54" [ 1548.084816] env[62510]: _type = "Task" [ 1548.084816] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.101841] env[62510]: DEBUG oslo_vmware.api [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Task: {'id': task-1768616, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085075} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.105638] env[62510]: DEBUG nova.policy [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '635c1339dcc74d98adf84fbf48042083', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e39188afd4e94f01a5b3f1ec78cf70e4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1548.108226] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1548.110505] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c586e129-6737-4f86-af31-90f874d91ae2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.118174] env[62510]: DEBUG oslo_vmware.api [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52235de0-d90a-6738-87b7-25f65b51ad54, 'name': SearchDatastore_Task, 'duration_secs': 0.021856} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.119865] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-062f5bda-f624-4d81-99bd-e3754bff25b9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.143471] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Reconfiguring VM instance instance-0000002a to attach disk [datastore1] 9d5d29ea-be92-4881-9fc8-fea3f2f442d0/9d5d29ea-be92-4881-9fc8-fea3f2f442d0.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1548.148296] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-182e5d57-4cc2-4d15-9cff-a8f7e7525fc3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.165181] env[62510]: DEBUG oslo_vmware.api [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Waiting for the task: (returnval){ [ 1548.165181] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5291546c-3686-ef99-ff40-6758f2b678c6" [ 1548.165181] env[62510]: _type = "Task" [ 1548.165181] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.178581] env[62510]: DEBUG oslo_vmware.api [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5291546c-3686-ef99-ff40-6758f2b678c6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.178990] env[62510]: DEBUG oslo_vmware.api [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Waiting for the task: (returnval){ [ 1548.178990] env[62510]: value = "task-1768617" [ 1548.178990] env[62510]: _type = "Task" [ 1548.178990] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.196023] env[62510]: DEBUG oslo_vmware.api [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Task: {'id': task-1768617, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.200903] env[62510]: DEBUG nova.network.neutron [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Successfully updated port: 667e6924-4cc9-4a1a-b451-453ac0491b41 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1548.467770] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c0d9aa72-20cb-49ff-9456-89d41ade8a37 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Lock "2c5d137d-4fd5-4035-a04f-bdb76e90edd7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 95.868s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1548.480791] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8adee77c-8c26-44b4-b179-3b6127573653 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Acquiring lock "interface-2c5d137d-4fd5-4035-a04f-bdb76e90edd7-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1548.480791] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8adee77c-8c26-44b4-b179-3b6127573653 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Lock "interface-2c5d137d-4fd5-4035-a04f-bdb76e90edd7-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1548.480791] env[62510]: DEBUG nova.objects.instance [None req-8adee77c-8c26-44b4-b179-3b6127573653 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Lazy-loading 'flavor' on Instance uuid 2c5d137d-4fd5-4035-a04f-bdb76e90edd7 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1548.495969] env[62510]: DEBUG oslo_vmware.api [None req-30362c47-ddbb-4616-b105-bbfb04080fb6 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768615, 'name': Destroy_Task, 'duration_secs': 0.906243} completed successfully. 
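The recurring "Acquiring lock ...", "acquired ... waited Ns" and "released ... held Ns" messages in these entries, including the compute_resources and interface-<uuid> locks above, come from oslo.concurrency's lockutils. A minimal sketch of the two usage forms that produce them (the lock names and the decorated function are illustrative only):

    from oslo_concurrency import lockutils

    # Context-manager form: emits the Acquiring/Acquired/Releasing lock lines.
    def update_usage():
        with lockutils.lock('compute_resources'):
            pass  # serialized critical section goes here

    # Decorator form: emits the "acquired ... waited" / "released ... held" lines.
    @lockutils.synchronized('interface-attach', external=False)
    def attach_interface(instance_uuid):
        pass  # per-interface work serialized under the named lock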
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.499369] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-30362c47-ddbb-4616-b105-bbfb04080fb6 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Destroyed the VM [ 1548.499778] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-30362c47-ddbb-4616-b105-bbfb04080fb6 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Deleting Snapshot of the VM instance {{(pid=62510) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1548.500558] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-0016b3fb-b8d7-46b0-9ea0-10076d35c77d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.511949] env[62510]: DEBUG oslo_vmware.api [None req-30362c47-ddbb-4616-b105-bbfb04080fb6 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Waiting for the task: (returnval){ [ 1548.511949] env[62510]: value = "task-1768618" [ 1548.511949] env[62510]: _type = "Task" [ 1548.511949] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.529358] env[62510]: DEBUG oslo_vmware.api [None req-30362c47-ddbb-4616-b105-bbfb04080fb6 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768618, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.529508] env[62510]: DEBUG nova.compute.manager [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1548.584675] env[62510]: DEBUG nova.compute.manager [req-95e528f9-69ea-46d4-9bb1-fb54faf7d15a req-8d0f7c0a-915d-4a0f-98c4-1b859a069c3c service nova] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Received event network-changed-9d4d0ed7-cab1-4f7d-9eda-faa60a248129 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1548.585413] env[62510]: DEBUG nova.compute.manager [req-95e528f9-69ea-46d4-9bb1-fb54faf7d15a req-8d0f7c0a-915d-4a0f-98c4-1b859a069c3c service nova] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Refreshing instance network info cache due to event network-changed-9d4d0ed7-cab1-4f7d-9eda-faa60a248129. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1548.585508] env[62510]: DEBUG oslo_concurrency.lockutils [req-95e528f9-69ea-46d4-9bb1-fb54faf7d15a req-8d0f7c0a-915d-4a0f-98c4-1b859a069c3c service nova] Acquiring lock "refresh_cache-2c5c38c1-511f-4aae-969a-eb6de128fae7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1548.585636] env[62510]: DEBUG oslo_concurrency.lockutils [req-95e528f9-69ea-46d4-9bb1-fb54faf7d15a req-8d0f7c0a-915d-4a0f-98c4-1b859a069c3c service nova] Acquired lock "refresh_cache-2c5c38c1-511f-4aae-969a-eb6de128fae7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1548.585799] env[62510]: DEBUG nova.network.neutron [req-95e528f9-69ea-46d4-9bb1-fb54faf7d15a req-8d0f7c0a-915d-4a0f-98c4-1b859a069c3c service nova] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Refreshing network info cache for port 9d4d0ed7-cab1-4f7d-9eda-faa60a248129 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1548.660737] env[62510]: DEBUG nova.network.neutron [req-75ec7194-0f7f-48da-a1ef-c2bcea5b831f req-d31d8901-e0b2-4707-9a0d-2872a66fe11b service nova] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Updated VIF entry in instance network info cache for port d6ee81d1-3abc-4d5e-a8ca-658407cbd553. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1548.661187] env[62510]: DEBUG nova.network.neutron [req-75ec7194-0f7f-48da-a1ef-c2bcea5b831f req-d31d8901-e0b2-4707-9a0d-2872a66fe11b service nova] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Updating instance_info_cache with network_info: [{"id": "d6ee81d1-3abc-4d5e-a8ca-658407cbd553", "address": "fa:16:3e:45:ab:75", "network": {"id": "3958d418-1b64-4598-975c-02b13c976ce5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1692593298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.217", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3109fa7889c64dfda2117d4cd58aa528", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ba56987-9dc3-4c76-a4e2-942b05355bdb", "external-id": "nsx-vlan-transportzone-698", "segmentation_id": 698, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6ee81d1-3a", "ovs_interfaceid": "d6ee81d1-3abc-4d5e-a8ca-658407cbd553", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1548.687572] env[62510]: DEBUG oslo_vmware.api [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5291546c-3686-ef99-ff40-6758f2b678c6, 'name': SearchDatastore_Task, 'duration_secs': 0.024205} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.692554] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1548.694524] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] f9eb5110-28ec-474e-b80e-0bfcee51483d/f9eb5110-28ec-474e-b80e-0bfcee51483d.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1548.696152] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5ec5858f-dd6e-466b-ae65-27c7c0258e79 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.709895] env[62510]: DEBUG oslo_vmware.api [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Task: {'id': task-1768617, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.710974] env[62510]: DEBUG oslo_vmware.api [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Waiting for the task: (returnval){ [ 1548.710974] env[62510]: value = "task-1768619" [ 1548.710974] env[62510]: _type = "Task" [ 1548.710974] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.724961] env[62510]: DEBUG oslo_vmware.api [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Task: {'id': task-1768619, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.780257] env[62510]: DEBUG nova.network.neutron [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Successfully created port: 1504f2f8-ef63-437a-b979-f2a95995a28f {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1548.806992] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccdce28d-f837-4c2b-92f9-c4cc35d37d14 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.816415] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-981ad65e-57b2-40e3-b4b3-4167dbfff6cc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.867619] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-908ec72a-741e-497d-affa-823990f08f35 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.878061] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53af4a4e-c974-4495-a421-ebac75453beb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.894581] env[62510]: DEBUG nova.compute.provider_tree [None req-2e6a41e1-595f-4d8f-898f-cb3b89fa2925 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1548.976738] env[62510]: DEBUG nova.compute.manager [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1548.985805] env[62510]: DEBUG nova.objects.instance [None req-8adee77c-8c26-44b4-b179-3b6127573653 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Lazy-loading 'pci_requests' on Instance uuid 2c5d137d-4fd5-4035-a04f-bdb76e90edd7 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1549.025276] env[62510]: DEBUG oslo_vmware.api [None req-30362c47-ddbb-4616-b105-bbfb04080fb6 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768618, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.163826] env[62510]: DEBUG oslo_concurrency.lockutils [req-75ec7194-0f7f-48da-a1ef-c2bcea5b831f req-d31d8901-e0b2-4707-9a0d-2872a66fe11b service nova] Releasing lock "refresh_cache-83fa0d32-18ee-401d-af0b-a0adb538e5f4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1549.200202] env[62510]: DEBUG oslo_vmware.api [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Task: {'id': task-1768617, 'name': ReconfigVM_Task, 'duration_secs': 0.673248} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.201166] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Reconfigured VM instance instance-0000002a to attach disk [datastore1] 9d5d29ea-be92-4881-9fc8-fea3f2f442d0/9d5d29ea-be92-4881-9fc8-fea3f2f442d0.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1549.201972] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b26e3fbb-8711-4ff8-95b5-8ddcc7af19d6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.211304] env[62510]: DEBUG oslo_vmware.api [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Waiting for the task: (returnval){ [ 1549.211304] env[62510]: value = "task-1768620" [ 1549.211304] env[62510]: _type = "Task" [ 1549.211304] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.230615] env[62510]: DEBUG oslo_vmware.api [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Task: {'id': task-1768620, 'name': Rename_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.230867] env[62510]: DEBUG oslo_vmware.api [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Task: {'id': task-1768619, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.370389] env[62510]: DEBUG nova.network.neutron [req-95e528f9-69ea-46d4-9bb1-fb54faf7d15a req-8d0f7c0a-915d-4a0f-98c4-1b859a069c3c service nova] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Updated VIF entry in instance network info cache for port 9d4d0ed7-cab1-4f7d-9eda-faa60a248129. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1549.370826] env[62510]: DEBUG nova.network.neutron [req-95e528f9-69ea-46d4-9bb1-fb54faf7d15a req-8d0f7c0a-915d-4a0f-98c4-1b859a069c3c service nova] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Updating instance_info_cache with network_info: [{"id": "9d4d0ed7-cab1-4f7d-9eda-faa60a248129", "address": "fa:16:3e:fd:0f:b3", "network": {"id": "4d6e89ba-db6e-4017-86b9-eabc5266d9be", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1854895739-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.211", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7658f50bda794df68c1e82f4978d787b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db068f71-08cc-42d4-8ab6-17134c1585e5", "external-id": "nsx-vlan-transportzone-721", "segmentation_id": 721, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d4d0ed7-ca", "ovs_interfaceid": "9d4d0ed7-cab1-4f7d-9eda-faa60a248129", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1549.399355] env[62510]: DEBUG nova.scheduler.client.report [None req-2e6a41e1-595f-4d8f-898f-cb3b89fa2925 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1549.492495] env[62510]: DEBUG nova.objects.base [None req-8adee77c-8c26-44b4-b179-3b6127573653 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Object Instance<2c5d137d-4fd5-4035-a04f-bdb76e90edd7> lazy-loaded attributes: flavor,pci_requests {{(pid=62510) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1549.492777] env[62510]: DEBUG nova.network.neutron [None req-8adee77c-8c26-44b4-b179-3b6127573653 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1549.503636] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1549.524735] env[62510]: DEBUG oslo_vmware.api [None req-30362c47-ddbb-4616-b105-bbfb04080fb6 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768618, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.538420] env[62510]: DEBUG nova.compute.manager [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1549.561488] env[62510]: DEBUG nova.virt.hardware [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1549.561739] env[62510]: DEBUG nova.virt.hardware [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1549.561895] env[62510]: DEBUG nova.virt.hardware [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1549.562092] env[62510]: DEBUG nova.virt.hardware [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1549.562245] env[62510]: DEBUG nova.virt.hardware [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1549.562393] env[62510]: DEBUG nova.virt.hardware [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1549.562598] env[62510]: DEBUG nova.virt.hardware [None 
req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1549.562753] env[62510]: DEBUG nova.virt.hardware [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1549.562927] env[62510]: DEBUG nova.virt.hardware [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1549.563093] env[62510]: DEBUG nova.virt.hardware [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1549.564046] env[62510]: DEBUG nova.virt.hardware [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1549.565159] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dc8bcc0-aec3-48ae-9b0e-cadcc127eb8f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.573397] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5131c6c8-b845-4151-8949-889a84f497c9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.589036] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8adee77c-8c26-44b4-b179-3b6127573653 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Lock "interface-2c5d137d-4fd5-4035-a04f-bdb76e90edd7-None" "released" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: held 1.110s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1549.726422] env[62510]: DEBUG oslo_vmware.api [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Task: {'id': task-1768620, 'name': Rename_Task, 'duration_secs': 0.287704} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.730026] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1549.730343] env[62510]: DEBUG oslo_vmware.api [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Task: {'id': task-1768619, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.695658} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.730611] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-167c73e4-d174-4546-92b8-cb59617e59c0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.732382] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] f9eb5110-28ec-474e-b80e-0bfcee51483d/f9eb5110-28ec-474e-b80e-0bfcee51483d.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1549.732603] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1549.732841] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9d68511c-05b2-42be-80b1-da5af5fcf1dc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.741865] env[62510]: DEBUG oslo_vmware.api [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Waiting for the task: (returnval){ [ 1549.741865] env[62510]: value = "task-1768622" [ 1549.741865] env[62510]: _type = "Task" [ 1549.741865] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.742941] env[62510]: DEBUG oslo_vmware.api [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Waiting for the task: (returnval){ [ 1549.742941] env[62510]: value = "task-1768621" [ 1549.742941] env[62510]: _type = "Task" [ 1549.742941] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.757056] env[62510]: DEBUG oslo_vmware.api [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Task: {'id': task-1768622, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.759813] env[62510]: DEBUG oslo_vmware.api [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Task: {'id': task-1768621, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.873431] env[62510]: DEBUG oslo_concurrency.lockutils [req-95e528f9-69ea-46d4-9bb1-fb54faf7d15a req-8d0f7c0a-915d-4a0f-98c4-1b859a069c3c service nova] Releasing lock "refresh_cache-2c5c38c1-511f-4aae-969a-eb6de128fae7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1549.873700] env[62510]: DEBUG nova.compute.manager [req-95e528f9-69ea-46d4-9bb1-fb54faf7d15a req-8d0f7c0a-915d-4a0f-98c4-1b859a069c3c service nova] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Received event network-vif-plugged-667e6924-4cc9-4a1a-b451-453ac0491b41 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1549.874059] env[62510]: DEBUG oslo_concurrency.lockutils [req-95e528f9-69ea-46d4-9bb1-fb54faf7d15a req-8d0f7c0a-915d-4a0f-98c4-1b859a069c3c service nova] Acquiring lock "87d1d75e-41c4-42e6-bf58-deabb71400e1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1549.874129] env[62510]: DEBUG oslo_concurrency.lockutils [req-95e528f9-69ea-46d4-9bb1-fb54faf7d15a req-8d0f7c0a-915d-4a0f-98c4-1b859a069c3c service nova] Lock "87d1d75e-41c4-42e6-bf58-deabb71400e1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1549.874348] env[62510]: DEBUG oslo_concurrency.lockutils [req-95e528f9-69ea-46d4-9bb1-fb54faf7d15a req-8d0f7c0a-915d-4a0f-98c4-1b859a069c3c service nova] Lock "87d1d75e-41c4-42e6-bf58-deabb71400e1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1549.874451] env[62510]: DEBUG nova.compute.manager [req-95e528f9-69ea-46d4-9bb1-fb54faf7d15a req-8d0f7c0a-915d-4a0f-98c4-1b859a069c3c service nova] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] No waiting events found dispatching network-vif-plugged-667e6924-4cc9-4a1a-b451-453ac0491b41 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1549.874604] env[62510]: WARNING nova.compute.manager [req-95e528f9-69ea-46d4-9bb1-fb54faf7d15a req-8d0f7c0a-915d-4a0f-98c4-1b859a069c3c service nova] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Received unexpected event network-vif-plugged-667e6924-4cc9-4a1a-b451-453ac0491b41 for instance with vm_state building and task_state spawning. 
[ 1549.874766] env[62510]: DEBUG nova.compute.manager [req-95e528f9-69ea-46d4-9bb1-fb54faf7d15a req-8d0f7c0a-915d-4a0f-98c4-1b859a069c3c service nova] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Received event network-changed-667e6924-4cc9-4a1a-b451-453ac0491b41 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1549.874920] env[62510]: DEBUG nova.compute.manager [req-95e528f9-69ea-46d4-9bb1-fb54faf7d15a req-8d0f7c0a-915d-4a0f-98c4-1b859a069c3c service nova] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Refreshing instance network info cache due to event network-changed-667e6924-4cc9-4a1a-b451-453ac0491b41. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1549.875196] env[62510]: DEBUG oslo_concurrency.lockutils [req-95e528f9-69ea-46d4-9bb1-fb54faf7d15a req-8d0f7c0a-915d-4a0f-98c4-1b859a069c3c service nova] Acquiring lock "refresh_cache-87d1d75e-41c4-42e6-bf58-deabb71400e1" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1549.875337] env[62510]: DEBUG oslo_concurrency.lockutils [req-95e528f9-69ea-46d4-9bb1-fb54faf7d15a req-8d0f7c0a-915d-4a0f-98c4-1b859a069c3c service nova] Acquired lock "refresh_cache-87d1d75e-41c4-42e6-bf58-deabb71400e1" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1549.875505] env[62510]: DEBUG nova.network.neutron [req-95e528f9-69ea-46d4-9bb1-fb54faf7d15a req-8d0f7c0a-915d-4a0f-98c4-1b859a069c3c service nova] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Refreshing network info cache for port 667e6924-4cc9-4a1a-b451-453ac0491b41 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1549.905304] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2e6a41e1-595f-4d8f-898f-cb3b89fa2925 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.383s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1549.907791] env[62510]: DEBUG oslo_concurrency.lockutils [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 25.443s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1549.907947] env[62510]: DEBUG nova.objects.instance [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62510) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1549.925084] env[62510]: INFO nova.scheduler.client.report [None req-2e6a41e1-595f-4d8f-898f-cb3b89fa2925 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Deleted allocations for instance e9711202-67f3-4fe2-befb-f28722ddea33 [ 1550.025015] env[62510]: DEBUG oslo_vmware.api [None req-30362c47-ddbb-4616-b105-bbfb04080fb6 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768618, 'name': RemoveSnapshot_Task, 
'duration_secs': 1.268328} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.025349] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-30362c47-ddbb-4616-b105-bbfb04080fb6 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Deleted Snapshot of the VM instance {{(pid=62510) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1550.025638] env[62510]: INFO nova.compute.manager [None req-30362c47-ddbb-4616-b105-bbfb04080fb6 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Took 20.28 seconds to snapshot the instance on the hypervisor. [ 1550.257088] env[62510]: DEBUG oslo_vmware.api [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Task: {'id': task-1768621, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.260065] env[62510]: DEBUG oslo_vmware.api [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Task: {'id': task-1768622, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.109887} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.260322] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1550.261088] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-106eefde-e9ff-48b2-8208-0e6a1be76d3d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.284342] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Reconfiguring VM instance instance-0000002b to attach disk [datastore1] f9eb5110-28ec-474e-b80e-0bfcee51483d/f9eb5110-28ec-474e-b80e-0bfcee51483d.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1550.284809] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b8481c0e-9a71-4f9d-a90a-1e7e36105b23 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.306995] env[62510]: DEBUG oslo_vmware.api [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Waiting for the task: (returnval){ [ 1550.306995] env[62510]: value = "task-1768623" [ 1550.306995] env[62510]: _type = "Task" [ 1550.306995] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.317295] env[62510]: DEBUG oslo_vmware.api [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Task: {'id': task-1768623, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.424018] env[62510]: DEBUG nova.network.neutron [req-95e528f9-69ea-46d4-9bb1-fb54faf7d15a req-8d0f7c0a-915d-4a0f-98c4-1b859a069c3c service nova] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1550.433545] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2e6a41e1-595f-4d8f-898f-cb3b89fa2925 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "e9711202-67f3-4fe2-befb-f28722ddea33" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 30.152s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1550.543155] env[62510]: DEBUG nova.network.neutron [req-95e528f9-69ea-46d4-9bb1-fb54faf7d15a req-8d0f7c0a-915d-4a0f-98c4-1b859a069c3c service nova] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1550.762032] env[62510]: DEBUG oslo_vmware.api [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Task: {'id': task-1768621, 'name': PowerOnVM_Task, 'duration_secs': 0.803329} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.762032] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1550.762032] env[62510]: INFO nova.compute.manager [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Took 10.29 seconds to spawn the instance on the hypervisor. [ 1550.762032] env[62510]: DEBUG nova.compute.manager [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1550.762032] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82ba538d-28f4-4435-a401-5621669e6315 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.818522] env[62510]: DEBUG oslo_vmware.api [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Task: {'id': task-1768623, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.908302] env[62510]: DEBUG nova.network.neutron [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Successfully updated port: fc653bf1-54d2-4e03-bb9f-b6486dd8ce76 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1550.911884] env[62510]: DEBUG nova.network.neutron [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Successfully updated port: 1504f2f8-ef63-437a-b979-f2a95995a28f {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1550.922184] env[62510]: DEBUG oslo_concurrency.lockutils [None req-52ca6e9a-f7ac-4bb2-ab02-fc7855af3a80 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1550.922184] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 24.630s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1550.922184] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1550.922184] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62510) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1550.922184] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d88c5bdb-e1d6-41aa-a1c5-e266840506ce tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.771s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1550.922620] env[62510]: DEBUG nova.objects.instance [None req-d88c5bdb-e1d6-41aa-a1c5-e266840506ce tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Lazy-loading 'resources' on Instance uuid a09a34de-fe7c-414b-8a89-2e9271c72a5c {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1550.925319] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0b2fb9c-6731-41f2-8e95-a8627da47108 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.936209] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a3acef77-4c04-4048-8501-d83d9996f1c4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.954982] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3738bd72-6566-4df7-a413-c11eefba638e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.963748] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5336d234-622d-42e0-82c4-50d720f7ffaa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.000679] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178872MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=62510) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1551.000851] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1551.002986] env[62510]: DEBUG nova.compute.manager [req-c7f97567-b84c-4f57-a553-7209242d4bcd req-9a1c14a6-52f0-43fe-9353-9b54205f765d service nova] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Received event network-vif-plugged-fc653bf1-54d2-4e03-bb9f-b6486dd8ce76 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1551.003209] env[62510]: DEBUG oslo_concurrency.lockutils [req-c7f97567-b84c-4f57-a553-7209242d4bcd req-9a1c14a6-52f0-43fe-9353-9b54205f765d service nova] Acquiring lock "87d1d75e-41c4-42e6-bf58-deabb71400e1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1551.003413] env[62510]: DEBUG oslo_concurrency.lockutils [req-c7f97567-b84c-4f57-a553-7209242d4bcd req-9a1c14a6-52f0-43fe-9353-9b54205f765d service nova] Lock "87d1d75e-41c4-42e6-bf58-deabb71400e1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1551.003876] env[62510]: DEBUG oslo_concurrency.lockutils [req-c7f97567-b84c-4f57-a553-7209242d4bcd req-9a1c14a6-52f0-43fe-9353-9b54205f765d service nova] Lock "87d1d75e-41c4-42e6-bf58-deabb71400e1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1551.003876] env[62510]: DEBUG nova.compute.manager [req-c7f97567-b84c-4f57-a553-7209242d4bcd req-9a1c14a6-52f0-43fe-9353-9b54205f765d service nova] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] No waiting events found dispatching network-vif-plugged-fc653bf1-54d2-4e03-bb9f-b6486dd8ce76 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1551.003876] env[62510]: WARNING nova.compute.manager [req-c7f97567-b84c-4f57-a553-7209242d4bcd req-9a1c14a6-52f0-43fe-9353-9b54205f765d service nova] [instance: 
87d1d75e-41c4-42e6-bf58-deabb71400e1] Received unexpected event network-vif-plugged-fc653bf1-54d2-4e03-bb9f-b6486dd8ce76 for instance with vm_state building and task_state spawning. [ 1551.045672] env[62510]: DEBUG oslo_concurrency.lockutils [req-95e528f9-69ea-46d4-9bb1-fb54faf7d15a req-8d0f7c0a-915d-4a0f-98c4-1b859a069c3c service nova] Releasing lock "refresh_cache-87d1d75e-41c4-42e6-bf58-deabb71400e1" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1551.281021] env[62510]: INFO nova.compute.manager [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Took 42.66 seconds to build instance. [ 1551.321686] env[62510]: DEBUG oslo_vmware.api [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Task: {'id': task-1768623, 'name': ReconfigVM_Task, 'duration_secs': 0.677345} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.321955] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Reconfigured VM instance instance-0000002b to attach disk [datastore1] f9eb5110-28ec-474e-b80e-0bfcee51483d/f9eb5110-28ec-474e-b80e-0bfcee51483d.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1551.322640] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ee702daa-94b2-495d-a85e-3b6f3a11e857 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.331699] env[62510]: DEBUG oslo_vmware.api [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Waiting for the task: (returnval){ [ 1551.331699] env[62510]: value = "task-1768624" [ 1551.331699] env[62510]: _type = "Task" [ 1551.331699] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.341737] env[62510]: DEBUG oslo_vmware.api [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Task: {'id': task-1768624, 'name': Rename_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.412468] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Acquiring lock "refresh_cache-87d1d75e-41c4-42e6-bf58-deabb71400e1" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1551.412468] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Acquired lock "refresh_cache-87d1d75e-41c4-42e6-bf58-deabb71400e1" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1551.412468] env[62510]: DEBUG nova.network.neutron [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1551.414294] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquiring lock "refresh_cache-ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1551.414294] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquired lock "refresh_cache-ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1551.414294] env[62510]: DEBUG nova.network.neutron [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1551.783025] env[62510]: DEBUG oslo_concurrency.lockutils [None req-350a6099-efcb-43d9-920f-f7686738799e tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Lock "9d5d29ea-be92-4881-9fc8-fea3f2f442d0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 93.118s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1551.845925] env[62510]: DEBUG oslo_vmware.api [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Task: {'id': task-1768624, 'name': Rename_Task, 'duration_secs': 0.235051} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.846392] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1551.846738] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e22c4f7d-2428-4e8c-85e5-40d560081e84 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.856835] env[62510]: DEBUG oslo_vmware.api [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Waiting for the task: (returnval){ [ 1551.856835] env[62510]: value = "task-1768625" [ 1551.856835] env[62510]: _type = "Task" [ 1551.856835] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.868461] env[62510]: DEBUG oslo_vmware.api [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Task: {'id': task-1768625, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.952832] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c1e9a41-17bf-404e-a8f4-88d6dc08585a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.962244] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3be88ef-f0cd-4fe4-8018-7df0c4391130 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.996432] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbad32d1-7178-4b36-8b15-31036809a640 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.999866] env[62510]: DEBUG nova.network.neutron [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1552.008101] env[62510]: DEBUG nova.network.neutron [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Instance cache missing network info. 
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1552.011366] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e2b1b05d-7c32-4aa8-87aa-db4a5f32e3c6 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Acquiring lock "2c5d137d-4fd5-4035-a04f-bdb76e90edd7" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1552.011556] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e2b1b05d-7c32-4aa8-87aa-db4a5f32e3c6 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Lock "2c5d137d-4fd5-4035-a04f-bdb76e90edd7" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1552.011823] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e2b1b05d-7c32-4aa8-87aa-db4a5f32e3c6 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Acquiring lock "2c5d137d-4fd5-4035-a04f-bdb76e90edd7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1552.012034] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e2b1b05d-7c32-4aa8-87aa-db4a5f32e3c6 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Lock "2c5d137d-4fd5-4035-a04f-bdb76e90edd7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1552.012226] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e2b1b05d-7c32-4aa8-87aa-db4a5f32e3c6 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Lock "2c5d137d-4fd5-4035-a04f-bdb76e90edd7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1552.017860] env[62510]: INFO nova.compute.manager [None req-e2b1b05d-7c32-4aa8-87aa-db4a5f32e3c6 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Terminating instance [ 1552.021166] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-751dbd6c-7052-407e-b0dd-22700c2a677f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.045700] env[62510]: DEBUG nova.compute.provider_tree [None req-d88c5bdb-e1d6-41aa-a1c5-e266840506ce tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1552.286056] env[62510]: DEBUG nova.compute.manager [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 
tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1552.369789] env[62510]: DEBUG oslo_vmware.api [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Task: {'id': task-1768625, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.405141] env[62510]: DEBUG nova.network.neutron [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Updating instance_info_cache with network_info: [{"id": "1504f2f8-ef63-437a-b979-f2a95995a28f", "address": "fa:16:3e:6e:f5:0b", "network": {"id": "f122ba1e-a858-4704-b83d-f76156f060fc", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1645239499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e39188afd4e94f01a5b3f1ec78cf70e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1504f2f8-ef", "ovs_interfaceid": "1504f2f8-ef63-437a-b979-f2a95995a28f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1552.534598] env[62510]: DEBUG nova.compute.manager [None req-e2b1b05d-7c32-4aa8-87aa-db4a5f32e3c6 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1552.534832] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e2b1b05d-7c32-4aa8-87aa-db4a5f32e3c6 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1552.535861] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4b4417f-9baf-452e-9cfd-3b0f656532a8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.544914] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2b1b05d-7c32-4aa8-87aa-db4a5f32e3c6 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1552.545291] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-db8cb714-147f-4336-93f8-0518c31521fd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.555024] env[62510]: DEBUG nova.scheduler.client.report [None req-d88c5bdb-e1d6-41aa-a1c5-e266840506ce tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1552.557665] env[62510]: DEBUG oslo_vmware.api [None req-e2b1b05d-7c32-4aa8-87aa-db4a5f32e3c6 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Waiting for the task: (returnval){ [ 1552.557665] env[62510]: value = "task-1768626" [ 1552.557665] env[62510]: _type = "Task" [ 1552.557665] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.567439] env[62510]: DEBUG oslo_concurrency.lockutils [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "b5ff2a10-3c76-469a-86e0-ed3b135bca37" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1552.567685] env[62510]: DEBUG oslo_concurrency.lockutils [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "b5ff2a10-3c76-469a-86e0-ed3b135bca37" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1552.573846] env[62510]: DEBUG oslo_vmware.api [None req-e2b1b05d-7c32-4aa8-87aa-db4a5f32e3c6 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Task: {'id': task-1768626, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.666314] env[62510]: DEBUG nova.network.neutron [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Updating instance_info_cache with network_info: [{"id": "667e6924-4cc9-4a1a-b451-453ac0491b41", "address": "fa:16:3e:15:b8:da", "network": {"id": "2bb54e45-de1a-4082-8c46-8b63ef9c6213", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-750938399", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.20", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d33701c4eedd47268e1c8d16bd63de81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13b62154-a0e1-4eed-bc30-6464b15993bb", "external-id": "nsx-vlan-transportzone-514", "segmentation_id": 514, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap667e6924-4c", "ovs_interfaceid": "667e6924-4cc9-4a1a-b451-453ac0491b41", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "fc653bf1-54d2-4e03-bb9f-b6486dd8ce76", "address": "fa:16:3e:ed:b1:3e", "network": {"id": "68e8cdd5-8257-4c44-ba46-30c907dc84a1", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-825081581", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.154", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "d33701c4eedd47268e1c8d16bd63de81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", 
"details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c42bb08a-77b4-4bba-8166-702cbb1b5f1e", "external-id": "nsx-vlan-transportzone-137", "segmentation_id": 137, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc653bf1-54", "ovs_interfaceid": "fc653bf1-54d2-4e03-bb9f-b6486dd8ce76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1552.811190] env[62510]: DEBUG oslo_concurrency.lockutils [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1552.870843] env[62510]: DEBUG oslo_vmware.api [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Task: {'id': task-1768625, 'name': PowerOnVM_Task, 'duration_secs': 0.838072} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.871197] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1552.871449] env[62510]: INFO nova.compute.manager [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Took 9.69 seconds to spawn the instance on the hypervisor. 
[ 1552.871702] env[62510]: DEBUG nova.compute.manager [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1552.872786] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d72e1514-fc01-4041-8641-8fc9930c047e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.908124] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Releasing lock "refresh_cache-ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1552.908124] env[62510]: DEBUG nova.compute.manager [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Instance network_info: |[{"id": "1504f2f8-ef63-437a-b979-f2a95995a28f", "address": "fa:16:3e:6e:f5:0b", "network": {"id": "f122ba1e-a858-4704-b83d-f76156f060fc", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1645239499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e39188afd4e94f01a5b3f1ec78cf70e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1504f2f8-ef", "ovs_interfaceid": "1504f2f8-ef63-437a-b979-f2a95995a28f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1552.908675] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6e:f5:0b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c1b8b991-feba-44e6-900c-6486e7e122f0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1504f2f8-ef63-437a-b979-f2a95995a28f', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1552.917386] env[62510]: DEBUG oslo.service.loopingcall [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1552.917667] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1552.917906] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a63be8ef-e9d2-477a-900e-d9bd4b914f1f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.945053] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1552.945053] env[62510]: value = "task-1768627" [ 1552.945053] env[62510]: _type = "Task" [ 1552.945053] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.958521] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768627, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.025362] env[62510]: DEBUG nova.compute.manager [req-38270e72-182c-47a6-bcba-7f6b3d10764f req-9aef640e-b0f6-42ea-b873-f906c22384db service nova] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Received event network-changed-3ed074cb-cedf-490b-b36a-d695cbf28633 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1553.026198] env[62510]: DEBUG nova.compute.manager [req-38270e72-182c-47a6-bcba-7f6b3d10764f req-9aef640e-b0f6-42ea-b873-f906c22384db service nova] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Refreshing instance network info cache due to event network-changed-3ed074cb-cedf-490b-b36a-d695cbf28633. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1553.026404] env[62510]: DEBUG oslo_concurrency.lockutils [req-38270e72-182c-47a6-bcba-7f6b3d10764f req-9aef640e-b0f6-42ea-b873-f906c22384db service nova] Acquiring lock "refresh_cache-9d5d29ea-be92-4881-9fc8-fea3f2f442d0" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1553.026571] env[62510]: DEBUG oslo_concurrency.lockutils [req-38270e72-182c-47a6-bcba-7f6b3d10764f req-9aef640e-b0f6-42ea-b873-f906c22384db service nova] Acquired lock "refresh_cache-9d5d29ea-be92-4881-9fc8-fea3f2f442d0" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1553.026751] env[62510]: DEBUG nova.network.neutron [req-38270e72-182c-47a6-bcba-7f6b3d10764f req-9aef640e-b0f6-42ea-b873-f906c22384db service nova] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Refreshing network info cache for port 3ed074cb-cedf-490b-b36a-d695cbf28633 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1553.059132] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d88c5bdb-e1d6-41aa-a1c5-e266840506ce tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.137s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1553.061756] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.387s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1553.063321] env[62510]: INFO nova.compute.claims [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1553.077971] env[62510]: DEBUG oslo_vmware.api [None req-e2b1b05d-7c32-4aa8-87aa-db4a5f32e3c6 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Task: {'id': task-1768626, 'name': PowerOffVM_Task, 'duration_secs': 0.332512} completed successfully. 
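The "compute_resources" acquired/released entries above come from oslo.concurrency process-local locks serializing the resource tracker; the 25.387s "waited" figure logged for the MigrationsAdminTest claim is time spent queued behind other holders. A minimal sketch of that locking pattern; names are illustrative and this is not Nova's resource tracker code.

# Minimal sketch (not Nova's code) of the lock usage behind the
# "compute_resources" acquired/released lines above.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def instance_claim(instance, flavor):
    # Body elided; the point is that concurrent claims and update_usage calls
    # serialize on this one named lock, which is where long "waited" values in
    # the log come from.
    ...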
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.078403] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2b1b05d-7c32-4aa8-87aa-db4a5f32e3c6 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1553.078474] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e2b1b05d-7c32-4aa8-87aa-db4a5f32e3c6 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1553.078776] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c9b92ae5-a62d-4ee1-9d10-42aeb106e483 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.087379] env[62510]: INFO nova.scheduler.client.report [None req-d88c5bdb-e1d6-41aa-a1c5-e266840506ce tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Deleted allocations for instance a09a34de-fe7c-414b-8a89-2e9271c72a5c [ 1553.119460] env[62510]: DEBUG nova.compute.manager [req-3a52fa35-9459-4a70-939e-27b4154cf4ab req-1f4e5eb6-3869-401d-b33f-6fff927172f7 service nova] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Received event network-vif-plugged-1504f2f8-ef63-437a-b979-f2a95995a28f {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1553.119460] env[62510]: DEBUG oslo_concurrency.lockutils [req-3a52fa35-9459-4a70-939e-27b4154cf4ab req-1f4e5eb6-3869-401d-b33f-6fff927172f7 service nova] Acquiring lock "ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1553.119460] env[62510]: DEBUG oslo_concurrency.lockutils [req-3a52fa35-9459-4a70-939e-27b4154cf4ab req-1f4e5eb6-3869-401d-b33f-6fff927172f7 service nova] Lock "ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1553.119460] env[62510]: DEBUG oslo_concurrency.lockutils [req-3a52fa35-9459-4a70-939e-27b4154cf4ab req-1f4e5eb6-3869-401d-b33f-6fff927172f7 service nova] Lock "ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1553.119460] env[62510]: DEBUG nova.compute.manager [req-3a52fa35-9459-4a70-939e-27b4154cf4ab req-1f4e5eb6-3869-401d-b33f-6fff927172f7 service nova] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] No waiting events found dispatching network-vif-plugged-1504f2f8-ef63-437a-b979-f2a95995a28f {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1553.119460] env[62510]: WARNING nova.compute.manager [req-3a52fa35-9459-4a70-939e-27b4154cf4ab req-1f4e5eb6-3869-401d-b33f-6fff927172f7 service nova] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Received unexpected 
event network-vif-plugged-1504f2f8-ef63-437a-b979-f2a95995a28f for instance with vm_state building and task_state spawning. [ 1553.119460] env[62510]: DEBUG nova.compute.manager [req-3a52fa35-9459-4a70-939e-27b4154cf4ab req-1f4e5eb6-3869-401d-b33f-6fff927172f7 service nova] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Received event network-changed-fc653bf1-54d2-4e03-bb9f-b6486dd8ce76 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1553.119460] env[62510]: DEBUG nova.compute.manager [req-3a52fa35-9459-4a70-939e-27b4154cf4ab req-1f4e5eb6-3869-401d-b33f-6fff927172f7 service nova] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Refreshing instance network info cache due to event network-changed-fc653bf1-54d2-4e03-bb9f-b6486dd8ce76. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1553.119460] env[62510]: DEBUG oslo_concurrency.lockutils [req-3a52fa35-9459-4a70-939e-27b4154cf4ab req-1f4e5eb6-3869-401d-b33f-6fff927172f7 service nova] Acquiring lock "refresh_cache-87d1d75e-41c4-42e6-bf58-deabb71400e1" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1553.170109] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Releasing lock "refresh_cache-87d1d75e-41c4-42e6-bf58-deabb71400e1" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1553.170964] env[62510]: DEBUG nova.compute.manager [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Instance network_info: |[{"id": "667e6924-4cc9-4a1a-b451-453ac0491b41", "address": "fa:16:3e:15:b8:da", "network": {"id": "2bb54e45-de1a-4082-8c46-8b63ef9c6213", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-750938399", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.20", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d33701c4eedd47268e1c8d16bd63de81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13b62154-a0e1-4eed-bc30-6464b15993bb", "external-id": "nsx-vlan-transportzone-514", "segmentation_id": 514, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap667e6924-4c", "ovs_interfaceid": "667e6924-4cc9-4a1a-b451-453ac0491b41", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "fc653bf1-54d2-4e03-bb9f-b6486dd8ce76", "address": "fa:16:3e:ed:b1:3e", "network": {"id": "68e8cdd5-8257-4c44-ba46-30c907dc84a1", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-825081581", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.154", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], 
"meta": {"injected": false, "tenant_id": "d33701c4eedd47268e1c8d16bd63de81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c42bb08a-77b4-4bba-8166-702cbb1b5f1e", "external-id": "nsx-vlan-transportzone-137", "segmentation_id": 137, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc653bf1-54", "ovs_interfaceid": "fc653bf1-54d2-4e03-bb9f-b6486dd8ce76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1553.171698] env[62510]: DEBUG oslo_concurrency.lockutils [req-3a52fa35-9459-4a70-939e-27b4154cf4ab req-1f4e5eb6-3869-401d-b33f-6fff927172f7 service nova] Acquired lock "refresh_cache-87d1d75e-41c4-42e6-bf58-deabb71400e1" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1553.171698] env[62510]: DEBUG nova.network.neutron [req-3a52fa35-9459-4a70-939e-27b4154cf4ab req-1f4e5eb6-3869-401d-b33f-6fff927172f7 service nova] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Refreshing network info cache for port fc653bf1-54d2-4e03-bb9f-b6486dd8ce76 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1553.177371] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:15:b8:da', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '13b62154-a0e1-4eed-bc30-6464b15993bb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '667e6924-4cc9-4a1a-b451-453ac0491b41', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:ed:b1:3e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c42bb08a-77b4-4bba-8166-702cbb1b5f1e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fc653bf1-54d2-4e03-bb9f-b6486dd8ce76', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1553.183623] env[62510]: DEBUG oslo.service.loopingcall [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1553.184995] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1553.185288] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8c5d4646-a4e5-4304-a975-c02407757f7f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.212462] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1553.212462] env[62510]: value = "task-1768629" [ 1553.212462] env[62510]: _type = "Task" [ 1553.212462] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.224145] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768629, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.401833] env[62510]: INFO nova.compute.manager [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Took 40.88 seconds to build instance. [ 1553.459295] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768627, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.507566] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e2b1b05d-7c32-4aa8-87aa-db4a5f32e3c6 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1553.507566] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e2b1b05d-7c32-4aa8-87aa-db4a5f32e3c6 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1553.507844] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2b1b05d-7c32-4aa8-87aa-db4a5f32e3c6 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Deleting the datastore file [datastore1] 2c5d137d-4fd5-4035-a04f-bdb76e90edd7 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1553.508284] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5422780e-5697-49fe-ae91-14d9c39f2a14 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.517033] env[62510]: DEBUG oslo_vmware.api [None req-e2b1b05d-7c32-4aa8-87aa-db4a5f32e3c6 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Waiting for the task: (returnval){ [ 1553.517033] env[62510]: value = "task-1768630" [ 1553.517033] env[62510]: _type = "Task" [ 1553.517033] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.527645] env[62510]: DEBUG oslo_vmware.api [None req-e2b1b05d-7c32-4aa8-87aa-db4a5f32e3c6 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Task: {'id': task-1768630, 'name': DeleteDatastoreFile_Task} progress is 0%. 
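Paths such as "[datastore1] 2c5d137d-4fd5-4035-a04f-bdb76e90edd7" and "[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk" in the surrounding entries use the vSphere datastore-path notation: the datastore name in brackets followed by a path relative to the datastore root. A small illustrative helper for composing and splitting that notation; the driver keeps its own datastore-path helper and this sketch is not it.

# Illustrative only: compose/split "[datastore] folder/file" style paths as seen
# in the log. Not the driver's own datastore-path class.
def make_ds_path(datastore, *parts):
    rel = '/'.join(p.strip('/') for p in parts if p)
    return '[%s] %s' % (datastore, rel) if rel else '[%s]' % datastore

def split_ds_path(ds_path):
    datastore, _, rel = ds_path.partition('] ')
    return datastore.lstrip('['), rel

print(make_ds_path('datastore1', '2c5d137d-4fd5-4035-a04f-bdb76e90edd7'))
# -> [datastore1] 2c5d137d-4fd5-4035-a04f-bdb76e90edd7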
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.599043] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d88c5bdb-e1d6-41aa-a1c5-e266840506ce tempest-InstanceActionsNegativeTestJSON-635844065 tempest-InstanceActionsNegativeTestJSON-635844065-project-member] Lock "a09a34de-fe7c-414b-8a89-2e9271c72a5c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.223s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1553.723084] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768629, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.905988] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03a34e98-4935-4b61-afdf-200267bb9b13 tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Lock "f9eb5110-28ec-474e-b80e-0bfcee51483d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.419s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1553.986492] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768627, 'name': CreateVM_Task, 'duration_secs': 0.593171} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.986492] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1553.986492] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1553.986492] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1553.986492] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1553.986492] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4200e7b0-d0a7-4a53-bc4f-2b92a5b0f314 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.986492] env[62510]: DEBUG oslo_vmware.api [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1553.986492] env[62510]: value = 
"session[528e9567-5f6b-75c7-e952-406003a8e250]52afe36a-9466-b55d-b81b-d42b54908ef7" [ 1553.986492] env[62510]: _type = "Task" [ 1553.986492] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.988364] env[62510]: DEBUG oslo_vmware.api [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52afe36a-9466-b55d-b81b-d42b54908ef7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.027749] env[62510]: DEBUG oslo_vmware.api [None req-e2b1b05d-7c32-4aa8-87aa-db4a5f32e3c6 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Task: {'id': task-1768630, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.462778} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.028170] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2b1b05d-7c32-4aa8-87aa-db4a5f32e3c6 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1554.028280] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e2b1b05d-7c32-4aa8-87aa-db4a5f32e3c6 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1554.028390] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e2b1b05d-7c32-4aa8-87aa-db4a5f32e3c6 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1554.028577] env[62510]: INFO nova.compute.manager [None req-e2b1b05d-7c32-4aa8-87aa-db4a5f32e3c6 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Took 1.49 seconds to destroy the instance on the hypervisor. [ 1554.028828] env[62510]: DEBUG oslo.service.loopingcall [None req-e2b1b05d-7c32-4aa8-87aa-db4a5f32e3c6 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1554.029046] env[62510]: DEBUG nova.compute.manager [-] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1554.029141] env[62510]: DEBUG nova.network.neutron [-] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1554.051944] env[62510]: DEBUG nova.network.neutron [req-38270e72-182c-47a6-bcba-7f6b3d10764f req-9aef640e-b0f6-42ea-b873-f906c22384db service nova] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Updated VIF entry in instance network info cache for port 3ed074cb-cedf-490b-b36a-d695cbf28633. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1554.052341] env[62510]: DEBUG nova.network.neutron [req-38270e72-182c-47a6-bcba-7f6b3d10764f req-9aef640e-b0f6-42ea-b873-f906c22384db service nova] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Updating instance_info_cache with network_info: [{"id": "3ed074cb-cedf-490b-b36a-d695cbf28633", "address": "fa:16:3e:d3:29:f8", "network": {"id": "1a3ffc18-d3cd-4d21-aea2-6c09ec9f79ba", "bridge": "br-int", "label": "tempest-ServersTestJSON-2022155437-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b7605d0022ff45a8a1fb357da78ecc3d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aaf1b231-3660-4453-b4f3-44d825b9a5dd", "external-id": "nsx-vlan-transportzone-6", "segmentation_id": 6, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ed074cb-ce", "ovs_interfaceid": "3ed074cb-cedf-490b-b36a-d695cbf28633", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1554.205870] env[62510]: DEBUG nova.network.neutron [req-3a52fa35-9459-4a70-939e-27b4154cf4ab req-1f4e5eb6-3869-401d-b33f-6fff927172f7 service nova] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Updated VIF entry in instance network info cache for port fc653bf1-54d2-4e03-bb9f-b6486dd8ce76. 
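The "Updated VIF entry in instance network info cache for port 3ed074cb-..." entry above is the tail end of a network-changed event: the affected port is re-fetched from Neutron and swapped into the cached network_info list while the other VIFs are left alone. A minimal sketch of that update step; illustrative only, not nova.network.neutron code.

# Illustrative sketch of the cache refresh above: swap in the re-fetched VIF for
# the changed port and leave the other entries of the cached list untouched.
def update_vif_entry(cached_network_info, refreshed_vif):
    for i, vif in enumerate(cached_network_info):
        if vif['id'] == refreshed_vif['id']:
            cached_network_info[i] = refreshed_vif   # the "Updated VIF entry" case
            break
    else:
        cached_network_info.append(refreshed_vif)    # port not in the cache yet
    return cached_network_info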
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1554.206433] env[62510]: DEBUG nova.network.neutron [req-3a52fa35-9459-4a70-939e-27b4154cf4ab req-1f4e5eb6-3869-401d-b33f-6fff927172f7 service nova] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Updating instance_info_cache with network_info: [{"id": "667e6924-4cc9-4a1a-b451-453ac0491b41", "address": "fa:16:3e:15:b8:da", "network": {"id": "2bb54e45-de1a-4082-8c46-8b63ef9c6213", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-750938399", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.20", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d33701c4eedd47268e1c8d16bd63de81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13b62154-a0e1-4eed-bc30-6464b15993bb", "external-id": "nsx-vlan-transportzone-514", "segmentation_id": 514, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap667e6924-4c", "ovs_interfaceid": "667e6924-4cc9-4a1a-b451-453ac0491b41", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "fc653bf1-54d2-4e03-bb9f-b6486dd8ce76", "address": "fa:16:3e:ed:b1:3e", "network": {"id": "68e8cdd5-8257-4c44-ba46-30c907dc84a1", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-825081581", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.154", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "d33701c4eedd47268e1c8d16bd63de81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c42bb08a-77b4-4bba-8166-702cbb1b5f1e", "external-id": "nsx-vlan-transportzone-137", "segmentation_id": 137, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc653bf1-54", "ovs_interfaceid": "fc653bf1-54d2-4e03-bb9f-b6486dd8ce76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1554.223578] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768629, 'name': CreateVM_Task, 'duration_secs': 1.00878} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.231964] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1554.232334] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1554.410816] env[62510]: DEBUG nova.compute.manager [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1554.495516] env[62510]: DEBUG oslo_vmware.api [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52afe36a-9466-b55d-b81b-d42b54908ef7, 'name': SearchDatastore_Task, 'duration_secs': 0.014646} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.495831] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1554.496432] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1554.496759] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1554.496916] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1554.497635] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1554.497973] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1554.498330] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1554.498574] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e3c476de-7828-4ed4-baf2-8d4f1c1b2c6b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.500772] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-708d71fa-6ffa-4a12-b705-60fe5c9ed6c1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.512315] env[62510]: DEBUG oslo_vmware.api [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Waiting for the task: (returnval){ [ 1554.512315] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5227a5f5-f7db-44f7-f8a3-c2ebbdad90c3" [ 1554.512315] env[62510]: _type = "Task" [ 1554.512315] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.513798] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1554.513993] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1554.520050] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f2c77c5-ba81-43e9-9ac8-dcdbf970816c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.529552] env[62510]: DEBUG oslo_vmware.api [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1554.529552] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52f75565-ce4e-1ca1-ec3c-1d65a77476ab" [ 1554.529552] env[62510]: _type = "Task" [ 1554.529552] env[62510]: } to complete. 
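The lock activity around "[datastore1] devstack-image-cache_base/645af513-..." above is the image-cache check: each request takes a lock named after the cached image path, searches the datastore for the cached VMDK, and only fetches the image when the search comes up empty. A hedged sketch of that shape; cached_vmdk_exists() and fetch_image() are hypothetical stand-ins, not Nova functions, and session is an oslo.vmware session as in the earlier sketch.

# Hedged sketch of the fetch-if-missing flow logged above. cached_vmdk_exists()
# and fetch_image() are hypothetical stand-ins, not Nova functions.
from oslo_concurrency import lockutils

def cached_vmdk_exists(session, cache_path):
    # Stand-in for the HostDatastoreBrowser.SearchDatastore_Task check in the log.
    raise NotImplementedError

def fetch_image(session, image_id, cache_path):
    # Stand-in for downloading the image into the cache folder.
    raise NotImplementedError

def ensure_image_cached(session, image_id,
                        cache_root='[datastore1] devstack-image-cache_base'):
    cache_path = '%s/%s' % (cache_root, image_id)
    # The lock name mirrors "[datastore1] devstack-image-cache_base/<image-id>"
    # in the log, so concurrent spawns of the same image serialize here.
    with lockutils.lock(cache_path):
        if not cached_vmdk_exists(session, cache_path):
            fetch_image(session, image_id, cache_path)
    return '%s/%s.vmdk' % (cache_path, image_id)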
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.533112] env[62510]: DEBUG oslo_vmware.api [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5227a5f5-f7db-44f7-f8a3-c2ebbdad90c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.545791] env[62510]: DEBUG oslo_vmware.api [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52f75565-ce4e-1ca1-ec3c-1d65a77476ab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.555317] env[62510]: DEBUG oslo_concurrency.lockutils [req-38270e72-182c-47a6-bcba-7f6b3d10764f req-9aef640e-b0f6-42ea-b873-f906c22384db service nova] Releasing lock "refresh_cache-9d5d29ea-be92-4881-9fc8-fea3f2f442d0" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1554.682432] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-598f0fb7-cd4c-4fcc-8fba-ca1730d0eb63 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.691087] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d38185c0-47e3-45bc-8503-a14c5222a5c6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.726025] env[62510]: DEBUG oslo_concurrency.lockutils [req-3a52fa35-9459-4a70-939e-27b4154cf4ab req-1f4e5eb6-3869-401d-b33f-6fff927172f7 service nova] Releasing lock "refresh_cache-87d1d75e-41c4-42e6-bf58-deabb71400e1" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1554.726335] env[62510]: DEBUG nova.compute.manager [req-3a52fa35-9459-4a70-939e-27b4154cf4ab req-1f4e5eb6-3869-401d-b33f-6fff927172f7 service nova] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Received event network-changed-1504f2f8-ef63-437a-b979-f2a95995a28f {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1554.726507] env[62510]: DEBUG nova.compute.manager [req-3a52fa35-9459-4a70-939e-27b4154cf4ab req-1f4e5eb6-3869-401d-b33f-6fff927172f7 service nova] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Refreshing instance network info cache due to event network-changed-1504f2f8-ef63-437a-b979-f2a95995a28f. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1554.726716] env[62510]: DEBUG oslo_concurrency.lockutils [req-3a52fa35-9459-4a70-939e-27b4154cf4ab req-1f4e5eb6-3869-401d-b33f-6fff927172f7 service nova] Acquiring lock "refresh_cache-ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1554.726857] env[62510]: DEBUG oslo_concurrency.lockutils [req-3a52fa35-9459-4a70-939e-27b4154cf4ab req-1f4e5eb6-3869-401d-b33f-6fff927172f7 service nova] Acquired lock "refresh_cache-ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1554.727031] env[62510]: DEBUG nova.network.neutron [req-3a52fa35-9459-4a70-939e-27b4154cf4ab req-1f4e5eb6-3869-401d-b33f-6fff927172f7 service nova] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Refreshing network info cache for port 1504f2f8-ef63-437a-b979-f2a95995a28f {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1554.728974] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fcdbcb3-8b5c-4367-8016-bfc38927d155 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.739204] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06c4ddb6-c398-4c10-b862-9d377160f937 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.755780] env[62510]: DEBUG nova.compute.provider_tree [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1554.934498] env[62510]: DEBUG oslo_concurrency.lockutils [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1554.949067] env[62510]: DEBUG nova.network.neutron [-] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1555.028314] env[62510]: DEBUG oslo_vmware.api [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5227a5f5-f7db-44f7-f8a3-c2ebbdad90c3, 'name': SearchDatastore_Task, 'duration_secs': 0.0252} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.028683] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1555.028942] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1555.029213] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1555.046578] env[62510]: DEBUG oslo_vmware.api [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52f75565-ce4e-1ca1-ec3c-1d65a77476ab, 'name': SearchDatastore_Task, 'duration_secs': 0.024749} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.049243] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56ae25f5-ec7a-43e0-aa58-876fbd903d20 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.055949] env[62510]: DEBUG oslo_vmware.api [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1555.055949] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]525b2289-c18d-7bee-f987-36e70a3aa46a" [ 1555.055949] env[62510]: _type = "Task" [ 1555.055949] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.065749] env[62510]: DEBUG oslo_vmware.api [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]525b2289-c18d-7bee-f987-36e70a3aa46a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.108398] env[62510]: DEBUG nova.compute.manager [req-c5c12c74-3551-46fb-8ddc-91fbc8fbb11e req-c6c0b4bb-f6d6-41a0-9d7a-323db266582b service nova] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Received event network-vif-deleted-47d66511-aecb-424e-91f3-0fe84c41ab26 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1555.258904] env[62510]: DEBUG nova.scheduler.client.report [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1555.452647] env[62510]: INFO nova.compute.manager [-] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Took 1.42 seconds to deallocate network for instance. [ 1555.571892] env[62510]: DEBUG oslo_vmware.api [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]525b2289-c18d-7bee-f987-36e70a3aa46a, 'name': SearchDatastore_Task, 'duration_secs': 0.017433} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.572219] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1555.572501] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3/ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1555.572802] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1555.572991] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1555.573253] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-18114be6-b42e-49a0-a50e-e11a4218295b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.576371] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6b997a37-a260-41a5-97a2-ab026ffd5978 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.580307] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e41b9908-a43d-436e-b104-a3be7f7188cd tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Acquiring lock "4f9bfb02-8aea-45a9-85ea-97e70f0d41fb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1555.581021] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e41b9908-a43d-436e-b104-a3be7f7188cd tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Lock "4f9bfb02-8aea-45a9-85ea-97e70f0d41fb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1555.581021] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e41b9908-a43d-436e-b104-a3be7f7188cd tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Acquiring lock "4f9bfb02-8aea-45a9-85ea-97e70f0d41fb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1555.581021] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e41b9908-a43d-436e-b104-a3be7f7188cd tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Lock "4f9bfb02-8aea-45a9-85ea-97e70f0d41fb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1555.581175] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e41b9908-a43d-436e-b104-a3be7f7188cd tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Lock "4f9bfb02-8aea-45a9-85ea-97e70f0d41fb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1555.583479] env[62510]: INFO nova.compute.manager [None req-e41b9908-a43d-436e-b104-a3be7f7188cd tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Terminating instance [ 1555.585976] env[62510]: DEBUG nova.network.neutron [req-3a52fa35-9459-4a70-939e-27b4154cf4ab req-1f4e5eb6-3869-401d-b33f-6fff927172f7 service nova] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Updated VIF entry in instance network info cache for port 1504f2f8-ef63-437a-b979-f2a95995a28f. 
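The "Inventory has not changed for provider ..." entry a few lines back is the report client short-circuiting a placement update: the locally computed inventory dict (the VCPU/MEMORY_MB/DISK_GB structure shown in the log) is compared with what the provider tree already holds, and the update is skipped when they match. A minimal sketch of that comparison using the numbers from the log; illustrative only, not the report client itself.

# Illustrative comparison only; the real report client tracks generations and a
# ProviderTree, but the "has not changed" decision reduces to a dict comparison.
local_inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165,
                'step_size': 1, 'allocation_ratio': 1.0},
}

def inventory_changed(cached_inventory, new_inventory):
    # Skip the placement update when every resource class reports identical values.
    return cached_inventory != new_inventory

print(inventory_changed(local_inventory, dict(local_inventory)))   # False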
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1555.588176] env[62510]: DEBUG nova.network.neutron [req-3a52fa35-9459-4a70-939e-27b4154cf4ab req-1f4e5eb6-3869-401d-b33f-6fff927172f7 service nova] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Updating instance_info_cache with network_info: [{"id": "1504f2f8-ef63-437a-b979-f2a95995a28f", "address": "fa:16:3e:6e:f5:0b", "network": {"id": "f122ba1e-a858-4704-b83d-f76156f060fc", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1645239499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e39188afd4e94f01a5b3f1ec78cf70e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1504f2f8-ef", "ovs_interfaceid": "1504f2f8-ef63-437a-b979-f2a95995a28f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1555.590737] env[62510]: DEBUG oslo_vmware.api [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1555.590737] env[62510]: value = "task-1768631" [ 1555.590737] env[62510]: _type = "Task" [ 1555.590737] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.596916] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1555.597139] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1555.603804] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb043230-034f-4f61-a766-4d0a778ac3d1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.625033] env[62510]: DEBUG oslo_vmware.api [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Waiting for the task: (returnval){ [ 1555.625033] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52ae9dc5-0b15-ceac-e791-f586aecef00c" [ 1555.625033] env[62510]: _type = "Task" [ 1555.625033] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.625033] env[62510]: DEBUG oslo_vmware.api [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768631, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.636647] env[62510]: DEBUG oslo_vmware.api [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52ae9dc5-0b15-ceac-e791-f586aecef00c, 'name': SearchDatastore_Task} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.637762] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b714de3-8e4f-4401-ba67-b9f308f5d611 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.645140] env[62510]: DEBUG oslo_vmware.api [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Waiting for the task: (returnval){ [ 1555.645140] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52ea69f9-bcdd-acda-43ac-886978c7fce6" [ 1555.645140] env[62510]: _type = "Task" [ 1555.645140] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.655599] env[62510]: DEBUG oslo_vmware.api [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52ea69f9-bcdd-acda-43ac-886978c7fce6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.765621] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.704s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1555.766206] env[62510]: DEBUG nova.compute.manager [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1555.769568] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ffaedeeb-ed18-4a73-af91-196101df9349 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.022s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1555.769881] env[62510]: DEBUG nova.objects.instance [None req-ffaedeeb-ed18-4a73-af91-196101df9349 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lazy-loading 'resources' on Instance uuid 0158d7af-d3bb-4d9c-a7c6-fbab943977e2 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1555.961808] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e2b1b05d-7c32-4aa8-87aa-db4a5f32e3c6 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1556.096444] env[62510]: DEBUG nova.compute.manager [None req-e41b9908-a43d-436e-b104-a3be7f7188cd tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1556.096692] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e41b9908-a43d-436e-b104-a3be7f7188cd tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1556.097288] env[62510]: DEBUG oslo_concurrency.lockutils [req-3a52fa35-9459-4a70-939e-27b4154cf4ab req-1f4e5eb6-3869-401d-b33f-6fff927172f7 service nova] Releasing lock "refresh_cache-ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1556.098387] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b8a49a2-0376-4fd4-9c30-c009f4bed3b3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.113590] env[62510]: DEBUG oslo_vmware.api [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768631, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.113906] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e41b9908-a43d-436e-b104-a3be7f7188cd tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1556.114192] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a289ae9b-4b2f-49f9-a8f2-ed6d15b8ebfb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.123956] env[62510]: DEBUG oslo_vmware.api [None req-e41b9908-a43d-436e-b104-a3be7f7188cd tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Waiting for the task: (returnval){ [ 1556.123956] env[62510]: value = "task-1768632" [ 1556.123956] env[62510]: _type = "Task" [ 1556.123956] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.134632] env[62510]: DEBUG oslo_vmware.api [None req-e41b9908-a43d-436e-b104-a3be7f7188cd tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768632, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.160120] env[62510]: DEBUG oslo_vmware.api [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52ea69f9-bcdd-acda-43ac-886978c7fce6, 'name': SearchDatastore_Task, 'duration_secs': 0.014999} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.160120] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1556.160120] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 87d1d75e-41c4-42e6-bf58-deabb71400e1/87d1d75e-41c4-42e6-bf58-deabb71400e1.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1556.160120] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ee4fd397-edae-4591-a900-af19d7878370 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.167418] env[62510]: DEBUG oslo_vmware.api [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Waiting for the task: (returnval){ [ 1556.167418] env[62510]: value = "task-1768633" [ 1556.167418] env[62510]: _type = "Task" [ 1556.167418] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.178615] env[62510]: DEBUG oslo_vmware.api [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': task-1768633, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.188666] env[62510]: DEBUG oslo_concurrency.lockutils [None req-615cb2c1-1c96-4930-8dea-7e7662d10cce tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Acquiring lock "f9eb5110-28ec-474e-b80e-0bfcee51483d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1556.188953] env[62510]: DEBUG oslo_concurrency.lockutils [None req-615cb2c1-1c96-4930-8dea-7e7662d10cce tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Lock "f9eb5110-28ec-474e-b80e-0bfcee51483d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1556.189195] env[62510]: DEBUG oslo_concurrency.lockutils [None req-615cb2c1-1c96-4930-8dea-7e7662d10cce tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Acquiring lock "f9eb5110-28ec-474e-b80e-0bfcee51483d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1556.189504] env[62510]: DEBUG oslo_concurrency.lockutils [None req-615cb2c1-1c96-4930-8dea-7e7662d10cce tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Lock "f9eb5110-28ec-474e-b80e-0bfcee51483d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1556.189767] env[62510]: DEBUG oslo_concurrency.lockutils [None req-615cb2c1-1c96-4930-8dea-7e7662d10cce tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Lock "f9eb5110-28ec-474e-b80e-0bfcee51483d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1556.192209] env[62510]: INFO nova.compute.manager [None req-615cb2c1-1c96-4930-8dea-7e7662d10cce tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Terminating instance [ 1556.273751] env[62510]: DEBUG nova.compute.utils [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1556.279813] env[62510]: DEBUG nova.compute.manager [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1556.280926] env[62510]: DEBUG nova.network.neutron [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1556.349664] env[62510]: DEBUG nova.policy [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ec8f49592421487c89b77efc86542f3e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c74083aa7b4a4db5b9b6d6248beb3ff3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1556.614370] env[62510]: DEBUG oslo_vmware.api [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768631, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.595411} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.614651] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3/ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1556.614849] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1556.615676] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-148128a5-fd5c-4e66-9630-40a57d886a99 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.627429] env[62510]: DEBUG oslo_vmware.api [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1556.627429] env[62510]: value = "task-1768634" [ 1556.627429] env[62510]: _type = "Task" [ 1556.627429] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.643471] env[62510]: DEBUG oslo_vmware.api [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768634, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.643771] env[62510]: DEBUG oslo_vmware.api [None req-e41b9908-a43d-436e-b104-a3be7f7188cd tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768632, 'name': PowerOffVM_Task, 'duration_secs': 0.219165} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.646937] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e41b9908-a43d-436e-b104-a3be7f7188cd tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1556.647237] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e41b9908-a43d-436e-b104-a3be7f7188cd tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1556.648912] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f7f3dce9-9f45-4f87-b3ea-f925477b20b2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.680961] env[62510]: DEBUG oslo_vmware.api [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': task-1768633, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.695969] env[62510]: DEBUG nova.compute.manager [None req-615cb2c1-1c96-4930-8dea-7e7662d10cce tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1556.696212] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-615cb2c1-1c96-4930-8dea-7e7662d10cce tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1556.697193] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48544b59-8584-4dd3-a266-2353f808119f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.709970] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-615cb2c1-1c96-4930-8dea-7e7662d10cce tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1556.710346] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-765f3db0-c5ef-4558-9e4c-667b93383f66 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.720949] env[62510]: DEBUG oslo_vmware.api [None req-615cb2c1-1c96-4930-8dea-7e7662d10cce tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Waiting for the task: (returnval){ [ 1556.720949] env[62510]: value = "task-1768636" [ 1556.720949] env[62510]: _type = "Task" [ 1556.720949] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.726021] env[62510]: DEBUG nova.network.neutron [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Successfully created port: 21d515b9-b00f-45cc-9437-318ee6bba755 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1556.738784] env[62510]: DEBUG oslo_vmware.api [None req-615cb2c1-1c96-4930-8dea-7e7662d10cce tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Task: {'id': task-1768636, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.764709] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e41b9908-a43d-436e-b104-a3be7f7188cd tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1556.764866] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e41b9908-a43d-436e-b104-a3be7f7188cd tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1556.765090] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-e41b9908-a43d-436e-b104-a3be7f7188cd tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Deleting the datastore file [datastore1] 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1556.765347] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d8ce7eac-2fe8-4e64-b9e7-81c2b2420abc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.774630] env[62510]: DEBUG oslo_vmware.api [None req-e41b9908-a43d-436e-b104-a3be7f7188cd tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Waiting for the task: (returnval){ [ 1556.774630] env[62510]: value = "task-1768637" [ 1556.774630] env[62510]: _type = "Task" [ 1556.774630] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.781062] env[62510]: DEBUG nova.compute.manager [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1556.787590] env[62510]: DEBUG oslo_vmware.api [None req-e41b9908-a43d-436e-b104-a3be7f7188cd tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768637, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.885982] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-363ed193-24f6-40fe-a7bd-f0b7f9ba168c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.897667] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c7a4a3a-7d81-4d07-b496-811ed2e0d32d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.930748] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c6f8f18-07bf-434d-85c7-59f8b69be511 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.940862] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55d4aedf-4d71-4eda-be7c-859338697cab {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.957471] env[62510]: DEBUG nova.compute.provider_tree [None req-ffaedeeb-ed18-4a73-af91-196101df9349 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1557.144768] env[62510]: DEBUG oslo_vmware.api [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768634, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083629} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.146020] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1557.146980] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d178cd92-f190-432e-82b0-a28b69a3ad5d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.180345] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3/ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1557.181109] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7fc7bb04-dfa7-4681-a86d-e82be7e95b30 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.218758] env[62510]: DEBUG oslo_vmware.api [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': task-1768633, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.808542} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.220862] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 87d1d75e-41c4-42e6-bf58-deabb71400e1/87d1d75e-41c4-42e6-bf58-deabb71400e1.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1557.221676] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1557.221888] env[62510]: DEBUG oslo_vmware.api [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1557.221888] env[62510]: value = "task-1768638" [ 1557.221888] env[62510]: _type = "Task" [ 1557.221888] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.222254] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f397c1a5-4aaf-4090-ae91-73ad83e18793 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.237700] env[62510]: DEBUG oslo_vmware.api [None req-615cb2c1-1c96-4930-8dea-7e7662d10cce tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Task: {'id': task-1768636, 'name': PowerOffVM_Task, 'duration_secs': 0.413252} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.242015] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-615cb2c1-1c96-4930-8dea-7e7662d10cce tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1557.242214] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-615cb2c1-1c96-4930-8dea-7e7662d10cce tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1557.242549] env[62510]: DEBUG oslo_vmware.api [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Waiting for the task: (returnval){ [ 1557.242549] env[62510]: value = "task-1768639" [ 1557.242549] env[62510]: _type = "Task" [ 1557.242549] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.242743] env[62510]: DEBUG oslo_vmware.api [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768638, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.242964] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cefff607-195f-4f7e-8894-ae1141937358 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.254665] env[62510]: DEBUG oslo_vmware.api [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': task-1768639, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.284239] env[62510]: DEBUG oslo_vmware.api [None req-e41b9908-a43d-436e-b104-a3be7f7188cd tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768637, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.434911} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.284606] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-e41b9908-a43d-436e-b104-a3be7f7188cd tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1557.284810] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e41b9908-a43d-436e-b104-a3be7f7188cd tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1557.285314] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e41b9908-a43d-436e-b104-a3be7f7188cd tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1557.285314] env[62510]: INFO nova.compute.manager [None req-e41b9908-a43d-436e-b104-a3be7f7188cd tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1557.285485] env[62510]: DEBUG oslo.service.loopingcall [None req-e41b9908-a43d-436e-b104-a3be7f7188cd tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1557.285687] env[62510]: DEBUG nova.compute.manager [-] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1557.285800] env[62510]: DEBUG nova.network.neutron [-] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1557.352916] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-615cb2c1-1c96-4930-8dea-7e7662d10cce tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1557.353250] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-615cb2c1-1c96-4930-8dea-7e7662d10cce tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1557.353508] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-615cb2c1-1c96-4930-8dea-7e7662d10cce tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Deleting the datastore file [datastore1] f9eb5110-28ec-474e-b80e-0bfcee51483d {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1557.353825] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-01b1907e-af0d-44bc-b3e4-4e52a03bddf8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.365418] env[62510]: DEBUG oslo_vmware.api [None req-615cb2c1-1c96-4930-8dea-7e7662d10cce tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Waiting for the task: (returnval){ [ 1557.365418] env[62510]: value = "task-1768641" [ 1557.365418] env[62510]: _type = "Task" [ 1557.365418] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.376535] env[62510]: DEBUG oslo_vmware.api [None req-615cb2c1-1c96-4930-8dea-7e7662d10cce tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Task: {'id': task-1768641, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.461023] env[62510]: DEBUG nova.scheduler.client.report [None req-ffaedeeb-ed18-4a73-af91-196101df9349 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1557.748933] env[62510]: DEBUG oslo_vmware.api [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768638, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.759884] env[62510]: DEBUG oslo_vmware.api [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': task-1768639, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.799864] env[62510]: DEBUG nova.compute.manager [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1557.830193] env[62510]: DEBUG nova.virt.hardware [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:36:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='c7683c68-4a26-4844-9915-d8d489d9d625',id=26,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1203151111',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1557.830486] env[62510]: DEBUG nova.virt.hardware [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1557.830651] env[62510]: DEBUG nova.virt.hardware [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1557.830836] env[62510]: DEBUG nova.virt.hardware [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1557.830983] env[62510]: DEBUG nova.virt.hardware [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1557.833174] env[62510]: DEBUG nova.virt.hardware [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1557.833496] env[62510]: DEBUG nova.virt.hardware [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1557.835411] env[62510]: DEBUG nova.virt.hardware [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1557.835667] env[62510]: DEBUG 
nova.virt.hardware [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1557.835799] env[62510]: DEBUG nova.virt.hardware [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1557.835989] env[62510]: DEBUG nova.virt.hardware [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1557.836913] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-360d13c1-f506-4396-b5fd-7a8975415b68 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.846848] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-111f445f-b5a8-4d88-a700-9c82b008894b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.876626] env[62510]: DEBUG oslo_vmware.api [None req-615cb2c1-1c96-4930-8dea-7e7662d10cce tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Task: {'id': task-1768641, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.278535} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.876888] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-615cb2c1-1c96-4930-8dea-7e7662d10cce tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1557.877259] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-615cb2c1-1c96-4930-8dea-7e7662d10cce tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1557.877519] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-615cb2c1-1c96-4930-8dea-7e7662d10cce tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1557.879163] env[62510]: INFO nova.compute.manager [None req-615cb2c1-1c96-4930-8dea-7e7662d10cce tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Took 1.18 seconds to destroy the instance on the hypervisor. 
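The nova.virt.hardware records above show the flavor's single vCPU being expanded into candidate CPU topologies ("Build topologies for 1 vcpu(s) 1:1:1") and reduced to the only valid option, VirtCPUTopology(cores=1,sockets=1,threads=1). The sketch below illustrates that enumeration under the product-and-limits rule visible in the log; it is a simplified stand-in, not nova's actual _get_possible_cpu_topologies, and the helper name possible_topologies is made up for illustration.

    # Simplified illustration (not nova's real nova.virt.hardware code) of how a vCPU
    # count is split into candidate sockets/cores/threads topologies within the limits
    # logged above (sockets=65536, cores=65536, threads=65536). Names are illustrative.
    from collections import namedtuple

    VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Yield every (sockets, cores, threads) split whose product equals vcpus
        and whose members stay within the given maxima."""
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, max_cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    yield VirtCPUTopology(sockets, cores, threads)

    # For the flavor in the log (vcpus=1) only one topology is possible, matching
    # the "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" record:
    print(list(possible_topologies(1)))  # [VirtCPUTopology(sockets=1, cores=1, threads=1)]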
[ 1557.879163] env[62510]: DEBUG oslo.service.loopingcall [None req-615cb2c1-1c96-4930-8dea-7e7662d10cce tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1557.879163] env[62510]: DEBUG nova.compute.manager [-] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1557.879163] env[62510]: DEBUG nova.network.neutron [-] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1557.965960] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ffaedeeb-ed18-4a73-af91-196101df9349 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.197s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1557.968331] env[62510]: DEBUG oslo_concurrency.lockutils [None req-294dbbf3-95f1-4d8d-87d7-d788f242b26d tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.187s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1557.968574] env[62510]: DEBUG nova.objects.instance [None req-294dbbf3-95f1-4d8d-87d7-d788f242b26d tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Lazy-loading 'resources' on Instance uuid cfe53f9c-d78b-4af7-b991-f3549c03f22d {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1557.972968] env[62510]: DEBUG oslo_vmware.rw_handles [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526c72ab-2d1b-af7d-b297-eb98ec9cac4e/disk-0.vmdk. {{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1557.974039] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f913850-0ed1-42bc-a901-9615164095b4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.983643] env[62510]: DEBUG oslo_vmware.rw_handles [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526c72ab-2d1b-af7d-b297-eb98ec9cac4e/disk-0.vmdk is in state: ready. 
{{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1557.983643] env[62510]: ERROR oslo_vmware.rw_handles [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526c72ab-2d1b-af7d-b297-eb98ec9cac4e/disk-0.vmdk due to incomplete transfer. [ 1557.983904] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-a1232b90-97b6-4ef9-a2b7-4a9734426681 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.994848] env[62510]: DEBUG oslo_vmware.rw_handles [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526c72ab-2d1b-af7d-b297-eb98ec9cac4e/disk-0.vmdk. {{(pid=62510) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1557.995093] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Uploaded image 30927c13-4037-47aa-8a27-1eeac883a406 to the Glance image server {{(pid=62510) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1557.997252] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Destroying the VM {{(pid=62510) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1557.997562] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-94ef8d2a-adc0-4964-a87b-288e10855bab {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.006768] env[62510]: INFO nova.scheduler.client.report [None req-ffaedeeb-ed18-4a73-af91-196101df9349 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Deleted allocations for instance 0158d7af-d3bb-4d9c-a7c6-fbab943977e2 [ 1558.011686] env[62510]: DEBUG oslo_vmware.api [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1558.011686] env[62510]: value = "task-1768642" [ 1558.011686] env[62510]: _type = "Task" [ 1558.011686] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.026165] env[62510]: DEBUG oslo_vmware.api [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768642, 'name': Destroy_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.243175] env[62510]: DEBUG oslo_vmware.api [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768638, 'name': ReconfigVM_Task, 'duration_secs': 0.614338} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.243519] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Reconfigured VM instance instance-0000002d to attach disk [datastore1] ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3/ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1558.244371] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-96668ac1-abe6-40ee-8a69-cbe7a2273b70 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.253803] env[62510]: DEBUG oslo_vmware.api [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1558.253803] env[62510]: value = "task-1768643" [ 1558.253803] env[62510]: _type = "Task" [ 1558.253803] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.262094] env[62510]: DEBUG oslo_vmware.api [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': task-1768639, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.583752} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.262792] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1558.263648] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a0f41d1-b854-4320-8c51-e657407214dc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.270040] env[62510]: DEBUG oslo_vmware.api [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768643, 'name': Rename_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.294451] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Reconfiguring VM instance instance-0000002c to attach disk [datastore1] 87d1d75e-41c4-42e6-bf58-deabb71400e1/87d1d75e-41c4-42e6-bf58-deabb71400e1.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1558.295195] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d68b258e-639e-4989-987a-5ce59600c4e4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.323364] env[62510]: DEBUG oslo_vmware.api [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Waiting for the task: (returnval){ [ 1558.323364] env[62510]: value = "task-1768644" [ 1558.323364] env[62510]: _type = "Task" [ 1558.323364] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.336638] env[62510]: DEBUG oslo_vmware.api [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': task-1768644, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.532101] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ffaedeeb-ed18-4a73-af91-196101df9349 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "0158d7af-d3bb-4d9c-a7c6-fbab943977e2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.195s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1558.534202] env[62510]: DEBUG nova.network.neutron [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Successfully updated port: 21d515b9-b00f-45cc-9437-318ee6bba755 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1558.545431] env[62510]: DEBUG oslo_vmware.api [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768642, 'name': Destroy_Task} progress is 33%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.716506] env[62510]: DEBUG nova.network.neutron [-] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1558.755164] env[62510]: DEBUG nova.compute.manager [req-b5b9aedf-b323-47a3-85f9-240172734ea6 req-9c4702a2-6fbd-4712-bede-1628d4ad54fc service nova] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Received event network-vif-deleted-fac4d91c-6432-4063-8e7b-93f076611d87 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1558.772691] env[62510]: DEBUG oslo_vmware.api [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768643, 'name': Rename_Task, 'duration_secs': 0.294273} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.774476] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1558.774779] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bd1c1d49-b6cd-425b-b3c4-7b24b090ae25 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.785678] env[62510]: DEBUG oslo_vmware.api [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1558.785678] env[62510]: value = "task-1768645" [ 1558.785678] env[62510]: _type = "Task" [ 1558.785678] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.804118] env[62510]: DEBUG oslo_vmware.api [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768645, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.838079] env[62510]: DEBUG oslo_vmware.api [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': task-1768644, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.853279] env[62510]: DEBUG nova.compute.manager [req-d1ef7028-6077-4c4a-87e3-eb2bbf56653a req-05649a05-b110-4ac2-9403-ff2dd06748e6 service nova] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Received event network-vif-plugged-21d515b9-b00f-45cc-9437-318ee6bba755 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1558.853510] env[62510]: DEBUG oslo_concurrency.lockutils [req-d1ef7028-6077-4c4a-87e3-eb2bbf56653a req-05649a05-b110-4ac2-9403-ff2dd06748e6 service nova] Acquiring lock "fae7e580-ab09-4fda-9cbe-0e066ddcb85c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1558.853724] env[62510]: DEBUG oslo_concurrency.lockutils [req-d1ef7028-6077-4c4a-87e3-eb2bbf56653a req-05649a05-b110-4ac2-9403-ff2dd06748e6 service nova] Lock "fae7e580-ab09-4fda-9cbe-0e066ddcb85c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1558.853889] env[62510]: DEBUG oslo_concurrency.lockutils [req-d1ef7028-6077-4c4a-87e3-eb2bbf56653a req-05649a05-b110-4ac2-9403-ff2dd06748e6 service nova] Lock "fae7e580-ab09-4fda-9cbe-0e066ddcb85c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1558.854083] env[62510]: DEBUG nova.compute.manager [req-d1ef7028-6077-4c4a-87e3-eb2bbf56653a req-05649a05-b110-4ac2-9403-ff2dd06748e6 service nova] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] No waiting events found dispatching network-vif-plugged-21d515b9-b00f-45cc-9437-318ee6bba755 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1558.854230] env[62510]: WARNING nova.compute.manager [req-d1ef7028-6077-4c4a-87e3-eb2bbf56653a req-05649a05-b110-4ac2-9403-ff2dd06748e6 service nova] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Received unexpected event network-vif-plugged-21d515b9-b00f-45cc-9437-318ee6bba755 for instance with vm_state building and task_state spawning. [ 1559.033627] env[62510]: DEBUG oslo_vmware.api [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768642, 'name': Destroy_Task, 'duration_secs': 0.998634} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.033916] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Destroyed the VM [ 1559.034190] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Deleting Snapshot of the VM instance {{(pid=62510) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1559.034492] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-f3850c39-930e-4774-acd1-89eb81707538 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.039948] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquiring lock "refresh_cache-fae7e580-ab09-4fda-9cbe-0e066ddcb85c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1559.039948] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquired lock "refresh_cache-fae7e580-ab09-4fda-9cbe-0e066ddcb85c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1559.039948] env[62510]: DEBUG nova.network.neutron [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1559.042603] env[62510]: DEBUG oslo_vmware.api [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1559.042603] env[62510]: value = "task-1768646" [ 1559.042603] env[62510]: _type = "Task" [ 1559.042603] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.054210] env[62510]: DEBUG oslo_vmware.api [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768646, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.092025] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-839b9345-9ff9-42d2-a0c2-f02b0798809b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.101873] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1f7dab9-4d39-46ae-86fc-e8ffada31bcd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.144505] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0529368-66f9-4157-825b-6dbd9222c740 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.153693] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b02d7d3-9994-4b07-9ceb-134df2ef28bc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.177556] env[62510]: DEBUG nova.compute.provider_tree [None req-294dbbf3-95f1-4d8d-87d7-d788f242b26d tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1559.220719] env[62510]: INFO nova.compute.manager [-] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Took 1.93 seconds to deallocate network for instance. [ 1559.278929] env[62510]: DEBUG nova.network.neutron [-] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1559.306188] env[62510]: DEBUG oslo_vmware.api [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768645, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.346406] env[62510]: DEBUG oslo_vmware.api [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': task-1768644, 'name': ReconfigVM_Task, 'duration_secs': 0.828506} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.346406] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Reconfigured VM instance instance-0000002c to attach disk [datastore1] 87d1d75e-41c4-42e6-bf58-deabb71400e1/87d1d75e-41c4-42e6-bf58-deabb71400e1.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1559.347017] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c477d8ad-4d87-46ee-9832-6771165ed9b9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.357717] env[62510]: DEBUG oslo_vmware.api [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Waiting for the task: (returnval){ [ 1559.357717] env[62510]: value = "task-1768647" [ 1559.357717] env[62510]: _type = "Task" [ 1559.357717] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.371630] env[62510]: DEBUG oslo_vmware.api [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': task-1768647, 'name': Rename_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.562248] env[62510]: DEBUG oslo_vmware.api [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768646, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.595578] env[62510]: DEBUG nova.network.neutron [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Instance cache missing network info. 
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1559.680570] env[62510]: DEBUG nova.scheduler.client.report [None req-294dbbf3-95f1-4d8d-87d7-d788f242b26d tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1559.729609] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e41b9908-a43d-436e-b104-a3be7f7188cd tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1559.783567] env[62510]: INFO nova.compute.manager [-] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Took 1.91 seconds to deallocate network for instance. [ 1559.803374] env[62510]: DEBUG oslo_vmware.api [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768645, 'name': PowerOnVM_Task, 'duration_secs': 0.659166} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.805478] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1559.805478] env[62510]: INFO nova.compute.manager [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Took 10.27 seconds to spawn the instance on the hypervisor. [ 1559.805478] env[62510]: DEBUG nova.compute.manager [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1559.805478] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00eadde0-8c69-420c-bde9-70616c806558 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.868785] env[62510]: DEBUG oslo_vmware.api [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': task-1768647, 'name': Rename_Task, 'duration_secs': 0.279892} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.869076] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1559.870193] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e46f44bf-35a3-4dd1-9e82-0fd40436ea3c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.877754] env[62510]: DEBUG oslo_vmware.api [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Waiting for the task: (returnval){ [ 1559.877754] env[62510]: value = "task-1768648" [ 1559.877754] env[62510]: _type = "Task" [ 1559.877754] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.892029] env[62510]: DEBUG oslo_vmware.api [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': task-1768648, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.907573] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4cfc52b1-d6b8-48ce-98b9-090206da0c3f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquiring lock "58e71d67-aed2-4329-ab60-4dfacff1d0a2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1559.910534] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4cfc52b1-d6b8-48ce-98b9-090206da0c3f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "58e71d67-aed2-4329-ab60-4dfacff1d0a2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1559.910534] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4cfc52b1-d6b8-48ce-98b9-090206da0c3f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquiring lock "58e71d67-aed2-4329-ab60-4dfacff1d0a2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1559.910534] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4cfc52b1-d6b8-48ce-98b9-090206da0c3f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "58e71d67-aed2-4329-ab60-4dfacff1d0a2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1559.910534] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4cfc52b1-d6b8-48ce-98b9-090206da0c3f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock 
"58e71d67-aed2-4329-ab60-4dfacff1d0a2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1559.911851] env[62510]: INFO nova.compute.manager [None req-4cfc52b1-d6b8-48ce-98b9-090206da0c3f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Terminating instance [ 1560.007984] env[62510]: DEBUG nova.network.neutron [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Updating instance_info_cache with network_info: [{"id": "21d515b9-b00f-45cc-9437-318ee6bba755", "address": "fa:16:3e:66:30:96", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.170", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21d515b9-b0", "ovs_interfaceid": "21d515b9-b00f-45cc-9437-318ee6bba755", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1560.054754] env[62510]: DEBUG oslo_vmware.api [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768646, 'name': RemoveSnapshot_Task, 'duration_secs': 0.607146} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.055142] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Deleted Snapshot of the VM instance {{(pid=62510) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1560.055396] env[62510]: INFO nova.compute.manager [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Took 17.89 seconds to snapshot the instance on the hypervisor. 
[ 1560.189041] env[62510]: DEBUG oslo_concurrency.lockutils [None req-294dbbf3-95f1-4d8d-87d7-d788f242b26d tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.220s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1560.192511] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e0016383-d5da-4037-b2a8-44b32ec35609 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1560.193403] env[62510]: DEBUG nova.objects.instance [None req-e0016383-d5da-4037-b2a8-44b32ec35609 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Lazy-loading 'resources' on Instance uuid 2d2ab209-8072-4e64-8170-50d96d71bc54 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1560.232023] env[62510]: INFO nova.scheduler.client.report [None req-294dbbf3-95f1-4d8d-87d7-d788f242b26d tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Deleted allocations for instance cfe53f9c-d78b-4af7-b991-f3549c03f22d [ 1560.290672] env[62510]: DEBUG oslo_concurrency.lockutils [None req-615cb2c1-1c96-4930-8dea-7e7662d10cce tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1560.329725] env[62510]: INFO nova.compute.manager [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Took 36.92 seconds to build instance. [ 1560.390304] env[62510]: DEBUG oslo_vmware.api [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': task-1768648, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.418280] env[62510]: DEBUG nova.compute.manager [None req-4cfc52b1-d6b8-48ce-98b9-090206da0c3f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1560.418504] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4cfc52b1-d6b8-48ce-98b9-090206da0c3f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1560.419456] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b22de0d-158f-4b06-b147-d7523e3e83a0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.429295] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4cfc52b1-d6b8-48ce-98b9-090206da0c3f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1560.429579] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9d4d7d28-1dcb-47b0-bda0-672f8e133dea {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.510839] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Releasing lock "refresh_cache-fae7e580-ab09-4fda-9cbe-0e066ddcb85c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1560.511216] env[62510]: DEBUG nova.compute.manager [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Instance network_info: |[{"id": "21d515b9-b00f-45cc-9437-318ee6bba755", "address": "fa:16:3e:66:30:96", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.170", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21d515b9-b0", "ovs_interfaceid": "21d515b9-b00f-45cc-9437-318ee6bba755", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1560.511817] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:66:30:96', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'28d04eee-6dbb-491a-a999-b659c799679d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '21d515b9-b00f-45cc-9437-318ee6bba755', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1560.521671] env[62510]: DEBUG oslo.service.loopingcall [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1560.521994] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1560.522306] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-024ea792-00a8-4fb9-abaf-5d1246247358 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.539155] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4cfc52b1-d6b8-48ce-98b9-090206da0c3f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1560.539382] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4cfc52b1-d6b8-48ce-98b9-090206da0c3f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1560.539564] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-4cfc52b1-d6b8-48ce-98b9-090206da0c3f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Deleting the datastore file [datastore1] 58e71d67-aed2-4329-ab60-4dfacff1d0a2 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1560.540313] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f25e70cc-11c4-49b8-a42f-08da85d5a849 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.547919] env[62510]: DEBUG oslo_vmware.api [None req-4cfc52b1-d6b8-48ce-98b9-090206da0c3f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1560.547919] env[62510]: value = "task-1768651" [ 1560.547919] env[62510]: _type = "Task" [ 1560.547919] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.549350] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1560.549350] env[62510]: value = "task-1768650" [ 1560.549350] env[62510]: _type = "Task" [ 1560.549350] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.564957] env[62510]: DEBUG nova.compute.manager [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Instance disappeared during snapshot {{(pid=62510) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 1560.567674] env[62510]: DEBUG oslo_vmware.api [None req-4cfc52b1-d6b8-48ce-98b9-090206da0c3f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768651, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.572829] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768650, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.581587] env[62510]: DEBUG nova.compute.manager [None req-b51315f3-15cf-4911-80a4-77ca9376f350 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Image not found during clean up 30927c13-4037-47aa-8a27-1eeac883a406 {{(pid=62510) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4601}} [ 1560.738536] env[62510]: DEBUG oslo_concurrency.lockutils [None req-294dbbf3-95f1-4d8d-87d7-d788f242b26d tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Lock "cfe53f9c-d78b-4af7-b991-f3549c03f22d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.983s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1560.832663] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c4fb1080-2670-4656-af4d-6c0d8e920ec9 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.939s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1560.851178] env[62510]: DEBUG nova.compute.manager [req-97a25761-945c-46f0-87ad-6492a96eec11 req-edf78593-c18c-4281-b022-507527eaed2d service nova] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Received event network-vif-deleted-ee64cf95-cf5c-4b37-9596-cab7a499649d {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1560.878492] env[62510]: DEBUG oslo_concurrency.lockutils [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquiring lock "d3e25d50-f315-439b-9e9f-8e454a0631d4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1560.878776] env[62510]: DEBUG oslo_concurrency.lockutils [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "d3e25d50-f315-439b-9e9f-8e454a0631d4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.004s {{(pid=62510) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1560.891009] env[62510]: DEBUG nova.compute.manager [req-be8686b2-707d-42cf-9cbf-a1f30be2ac1a req-ca0cfc21-405e-42f2-a6e2-16f238ddef42 service nova] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Received event network-changed-21d515b9-b00f-45cc-9437-318ee6bba755 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1560.891247] env[62510]: DEBUG nova.compute.manager [req-be8686b2-707d-42cf-9cbf-a1f30be2ac1a req-ca0cfc21-405e-42f2-a6e2-16f238ddef42 service nova] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Refreshing instance network info cache due to event network-changed-21d515b9-b00f-45cc-9437-318ee6bba755. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1560.891483] env[62510]: DEBUG oslo_concurrency.lockutils [req-be8686b2-707d-42cf-9cbf-a1f30be2ac1a req-ca0cfc21-405e-42f2-a6e2-16f238ddef42 service nova] Acquiring lock "refresh_cache-fae7e580-ab09-4fda-9cbe-0e066ddcb85c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1560.891630] env[62510]: DEBUG oslo_concurrency.lockutils [req-be8686b2-707d-42cf-9cbf-a1f30be2ac1a req-ca0cfc21-405e-42f2-a6e2-16f238ddef42 service nova] Acquired lock "refresh_cache-fae7e580-ab09-4fda-9cbe-0e066ddcb85c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1560.891792] env[62510]: DEBUG nova.network.neutron [req-be8686b2-707d-42cf-9cbf-a1f30be2ac1a req-ca0cfc21-405e-42f2-a6e2-16f238ddef42 service nova] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Refreshing network info cache for port 21d515b9-b00f-45cc-9437-318ee6bba755 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1560.899382] env[62510]: DEBUG oslo_vmware.api [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': task-1768648, 'name': PowerOnVM_Task, 'duration_secs': 0.764203} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.900041] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1560.901220] env[62510]: INFO nova.compute.manager [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Took 14.24 seconds to spawn the instance on the hypervisor. 
[ 1560.901220] env[62510]: DEBUG nova.compute.manager [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1560.901220] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-716bbbf1-923e-4cf5-a422-149c83d4b4e0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.072889] env[62510]: DEBUG oslo_vmware.api [None req-4cfc52b1-d6b8-48ce-98b9-090206da0c3f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768651, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.194131} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.073117] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768650, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.076102] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-4cfc52b1-d6b8-48ce-98b9-090206da0c3f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1561.076288] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4cfc52b1-d6b8-48ce-98b9-090206da0c3f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1561.076474] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4cfc52b1-d6b8-48ce-98b9-090206da0c3f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1561.076650] env[62510]: INFO nova.compute.manager [None req-4cfc52b1-d6b8-48ce-98b9-090206da0c3f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Took 0.66 seconds to destroy the instance on the hypervisor. [ 1561.076898] env[62510]: DEBUG oslo.service.loopingcall [None req-4cfc52b1-d6b8-48ce-98b9-090206da0c3f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1561.077324] env[62510]: DEBUG nova.compute.manager [-] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1561.077428] env[62510]: DEBUG nova.network.neutron [-] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1561.324524] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed104c21-adc3-4120-a599-12f52d30a518 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.333741] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1732affc-bfa6-458b-aabd-8dc445405139 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.338667] env[62510]: DEBUG nova.compute.manager [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1561.377507] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa33e439-a0c0-4df9-ad9d-cc83ebfc62f2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.386106] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef71d392-8046-43c9-b364-4f24faeab693 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.407647] env[62510]: DEBUG nova.compute.provider_tree [None req-e0016383-d5da-4037-b2a8-44b32ec35609 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1561.423098] env[62510]: INFO nova.compute.manager [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Took 40.73 seconds to build instance. [ 1561.570575] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768650, 'name': CreateVM_Task, 'duration_secs': 0.587224} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.570776] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1561.571484] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1561.571663] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1561.572380] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1561.572731] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db1043c7-be42-4fc3-9038-3a191ebb8b42 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.578397] env[62510]: DEBUG oslo_vmware.api [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Waiting for the task: (returnval){ [ 1561.578397] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52a31118-1809-d3e7-0049-0a292a43003f" [ 1561.578397] env[62510]: _type = "Task" [ 1561.578397] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1561.589757] env[62510]: DEBUG oslo_vmware.api [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52a31118-1809-d3e7-0049-0a292a43003f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.862149] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1561.913938] env[62510]: DEBUG nova.scheduler.client.report [None req-e0016383-d5da-4037-b2a8-44b32ec35609 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1561.926595] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cdf8fde8-ebb2-42be-b49b-0ecec959ceb5 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Lock "87d1d75e-41c4-42e6-bf58-deabb71400e1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 99.154s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1561.961936] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquiring lock "0d27da5c-20f3-4df1-86d2-036c904fd657" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1561.962185] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "0d27da5c-20f3-4df1-86d2-036c904fd657" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1561.983551] env[62510]: DEBUG nova.network.neutron [req-be8686b2-707d-42cf-9cbf-a1f30be2ac1a req-ca0cfc21-405e-42f2-a6e2-16f238ddef42 service nova] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Updated VIF entry in instance network info cache for port 21d515b9-b00f-45cc-9437-318ee6bba755. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1561.983900] env[62510]: DEBUG nova.network.neutron [req-be8686b2-707d-42cf-9cbf-a1f30be2ac1a req-ca0cfc21-405e-42f2-a6e2-16f238ddef42 service nova] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Updating instance_info_cache with network_info: [{"id": "21d515b9-b00f-45cc-9437-318ee6bba755", "address": "fa:16:3e:66:30:96", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.170", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21d515b9-b0", "ovs_interfaceid": "21d515b9-b00f-45cc-9437-318ee6bba755", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1562.009775] env[62510]: DEBUG nova.network.neutron [-] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1562.091150] env[62510]: DEBUG oslo_vmware.api [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52a31118-1809-d3e7-0049-0a292a43003f, 'name': SearchDatastore_Task, 'duration_secs': 0.016566} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1562.091699] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1562.092113] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1562.092566] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1562.092854] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1562.093170] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1562.093545] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9c83920f-9bbe-4a9f-9e2d-2247bb01b546 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.104053] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1562.105875] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1562.105875] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b9bcc65-4e53-4b3e-a8c3-572ae17072cc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.116028] env[62510]: DEBUG oslo_vmware.api [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Waiting for the task: (returnval){ [ 1562.116028] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52293ad8-685a-bbfa-81a1-fa963d52a17e" [ 1562.116028] env[62510]: _type = "Task" [ 1562.116028] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1562.123030] env[62510]: DEBUG oslo_vmware.api [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52293ad8-685a-bbfa-81a1-fa963d52a17e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.420305] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e0016383-d5da-4037-b2a8-44b32ec35609 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.228s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1562.425197] env[62510]: DEBUG oslo_concurrency.lockutils [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.917s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1562.425519] env[62510]: INFO nova.compute.claims [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1562.432335] env[62510]: DEBUG nova.compute.manager [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1562.469577] env[62510]: INFO nova.scheduler.client.report [None req-e0016383-d5da-4037-b2a8-44b32ec35609 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Deleted allocations for instance 2d2ab209-8072-4e64-8170-50d96d71bc54 [ 1562.487229] env[62510]: DEBUG oslo_concurrency.lockutils [req-be8686b2-707d-42cf-9cbf-a1f30be2ac1a req-ca0cfc21-405e-42f2-a6e2-16f238ddef42 service nova] Releasing lock "refresh_cache-fae7e580-ab09-4fda-9cbe-0e066ddcb85c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1562.502262] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bc0748ed-c5bc-4d47-9137-6d3481e34650 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Acquiring lock "87d1d75e-41c4-42e6-bf58-deabb71400e1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1562.502262] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bc0748ed-c5bc-4d47-9137-6d3481e34650 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Lock "87d1d75e-41c4-42e6-bf58-deabb71400e1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1562.502262] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bc0748ed-c5bc-4d47-9137-6d3481e34650 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Acquiring lock "87d1d75e-41c4-42e6-bf58-deabb71400e1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1562.502262] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bc0748ed-c5bc-4d47-9137-6d3481e34650 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Lock "87d1d75e-41c4-42e6-bf58-deabb71400e1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1562.502262] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bc0748ed-c5bc-4d47-9137-6d3481e34650 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Lock "87d1d75e-41c4-42e6-bf58-deabb71400e1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1562.506577] env[62510]: INFO nova.compute.manager [None req-bc0748ed-c5bc-4d47-9137-6d3481e34650 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Terminating instance [ 1562.514728] env[62510]: INFO nova.compute.manager [-] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Took 1.43 seconds to deallocate network for instance. 
[ 1562.625271] env[62510]: DEBUG oslo_vmware.api [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52293ad8-685a-bbfa-81a1-fa963d52a17e, 'name': SearchDatastore_Task, 'duration_secs': 0.011988} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1562.626747] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79aa3f69-72c6-4e15-8d08-bacd2a8a618b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.634735] env[62510]: DEBUG oslo_vmware.api [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Waiting for the task: (returnval){ [ 1562.634735] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52811be2-53e6-b082-8317-4ac20aff8d66" [ 1562.634735] env[62510]: _type = "Task" [ 1562.634735] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1562.645830] env[62510]: DEBUG oslo_vmware.api [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52811be2-53e6-b082-8317-4ac20aff8d66, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.935488] env[62510]: DEBUG nova.compute.manager [req-cec607b6-d86a-4894-ab9d-7b9eb9e8e0a9 req-8bc74c64-b2a4-4206-9014-b75041d9bed8 service nova] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Received event network-vif-deleted-6a83554a-ba95-455d-8a32-16fec19f8c40 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1562.956715] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1562.984484] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e0016383-d5da-4037-b2a8-44b32ec35609 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Lock "2d2ab209-8072-4e64-8170-50d96d71bc54" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.752s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1563.011299] env[62510]: DEBUG nova.compute.manager [None req-bc0748ed-c5bc-4d47-9137-6d3481e34650 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1563.011299] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-bc0748ed-c5bc-4d47-9137-6d3481e34650 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1563.012012] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3f0fc3b-cbb1-46b7-b446-6b17b2243ec5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.021619] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4cfc52b1-d6b8-48ce-98b9-090206da0c3f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1563.021923] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc0748ed-c5bc-4d47-9137-6d3481e34650 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1563.022206] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eece2f0a-353a-4ad5-b76a-363bed18694d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.030902] env[62510]: DEBUG oslo_vmware.api [None req-bc0748ed-c5bc-4d47-9137-6d3481e34650 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Waiting for the task: (returnval){ [ 1563.030902] env[62510]: value = "task-1768652" [ 1563.030902] env[62510]: _type = "Task" [ 1563.030902] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.041197] env[62510]: DEBUG oslo_vmware.api [None req-bc0748ed-c5bc-4d47-9137-6d3481e34650 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': task-1768652, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.146132] env[62510]: DEBUG oslo_vmware.api [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52811be2-53e6-b082-8317-4ac20aff8d66, 'name': SearchDatastore_Task, 'duration_secs': 0.012589} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.146435] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1563.146843] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] fae7e580-ab09-4fda-9cbe-0e066ddcb85c/fae7e580-ab09-4fda-9cbe-0e066ddcb85c.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1563.147021] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3bbd6c16-fee5-49c4-8446-40609f9b1b50 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.154162] env[62510]: DEBUG oslo_vmware.api [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Waiting for the task: (returnval){ [ 1563.154162] env[62510]: value = "task-1768653" [ 1563.154162] env[62510]: _type = "Task" [ 1563.154162] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.162893] env[62510]: DEBUG oslo_vmware.api [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768653, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.548527] env[62510]: DEBUG oslo_vmware.api [None req-bc0748ed-c5bc-4d47-9137-6d3481e34650 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': task-1768652, 'name': PowerOffVM_Task, 'duration_secs': 0.236944} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.552010] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc0748ed-c5bc-4d47-9137-6d3481e34650 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1563.552423] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-bc0748ed-c5bc-4d47-9137-6d3481e34650 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1563.553054] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a1ad5bc9-0851-46db-8e04-dcb9b87d0118 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.669797] env[62510]: DEBUG oslo_vmware.api [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768653, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.934916] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1563.935293] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1563.950315] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b54dc14e-9323-48fe-97bd-1a803ef92996 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.960132] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edc46a9b-51ba-4c6e-914e-0e2a82b32812 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.995546] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b656e5cf-5267-4512-bbf5-22f5a916f2b5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.004653] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b98c3ce-19de-4201-a7ae-80a8159aed08 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.019900] env[62510]: DEBUG nova.compute.provider_tree [None 
req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1564.166306] env[62510]: DEBUG oslo_vmware.api [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768653, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.927632} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.166592] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] fae7e580-ab09-4fda-9cbe-0e066ddcb85c/fae7e580-ab09-4fda-9cbe-0e066ddcb85c.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1564.166811] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1564.167087] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-13221507-ee0c-4f68-bcbc-5092b8cce8bb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.175131] env[62510]: DEBUG oslo_vmware.api [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Waiting for the task: (returnval){ [ 1564.175131] env[62510]: value = "task-1768655" [ 1564.175131] env[62510]: _type = "Task" [ 1564.175131] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.186291] env[62510]: DEBUG oslo_vmware.api [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768655, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.319824] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a3d77851-01c5-4940-827e-f1f4ec1edba2 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Acquiring lock "34a464e2-d38e-4c24-a487-c62a4f484667" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1564.319974] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a3d77851-01c5-4940-827e-f1f4ec1edba2 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Lock "34a464e2-d38e-4c24-a487-c62a4f484667" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1564.320236] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a3d77851-01c5-4940-827e-f1f4ec1edba2 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Acquiring lock "34a464e2-d38e-4c24-a487-c62a4f484667-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1564.320433] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a3d77851-01c5-4940-827e-f1f4ec1edba2 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Lock "34a464e2-d38e-4c24-a487-c62a4f484667-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1564.320656] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a3d77851-01c5-4940-827e-f1f4ec1edba2 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Lock "34a464e2-d38e-4c24-a487-c62a4f484667-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1564.323103] env[62510]: INFO nova.compute.manager [None req-a3d77851-01c5-4940-827e-f1f4ec1edba2 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Terminating instance [ 1564.525040] env[62510]: DEBUG nova.scheduler.client.report [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1564.574442] env[62510]: DEBUG 
nova.virt.vmwareapi.vmops [None req-bc0748ed-c5bc-4d47-9137-6d3481e34650 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1564.574817] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-bc0748ed-c5bc-4d47-9137-6d3481e34650 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1564.574817] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc0748ed-c5bc-4d47-9137-6d3481e34650 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Deleting the datastore file [datastore1] 87d1d75e-41c4-42e6-bf58-deabb71400e1 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1564.575094] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1d060101-f7ae-4223-8bec-06b768a38559 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.582646] env[62510]: DEBUG oslo_vmware.api [None req-bc0748ed-c5bc-4d47-9137-6d3481e34650 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Waiting for the task: (returnval){ [ 1564.582646] env[62510]: value = "task-1768656" [ 1564.582646] env[62510]: _type = "Task" [ 1564.582646] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.590183] env[62510]: DEBUG oslo_vmware.api [None req-bc0748ed-c5bc-4d47-9137-6d3481e34650 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': task-1768656, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.686038] env[62510]: DEBUG oslo_vmware.api [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768655, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076995} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.686202] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1564.686951] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10cf2095-65e2-4b8b-a6ed-a4a01c1b35df {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.710192] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Reconfiguring VM instance instance-0000002e to attach disk [datastore1] fae7e580-ab09-4fda-9cbe-0e066ddcb85c/fae7e580-ab09-4fda-9cbe-0e066ddcb85c.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1564.710491] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-75c8b363-320d-4f61-9f4e-3f85f5555367 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.732422] env[62510]: DEBUG oslo_vmware.api [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Waiting for the task: (returnval){ [ 1564.732422] env[62510]: value = "task-1768657" [ 1564.732422] env[62510]: _type = "Task" [ 1564.732422] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.740869] env[62510]: DEBUG oslo_vmware.api [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768657, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.826724] env[62510]: DEBUG nova.compute.manager [None req-a3d77851-01c5-4940-827e-f1f4ec1edba2 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1564.826983] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a3d77851-01c5-4940-827e-f1f4ec1edba2 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1564.827980] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e247b6a-3498-4a2f-8eef-9fc92c257cbb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.837456] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3d77851-01c5-4940-827e-f1f4ec1edba2 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1564.837770] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c88c4945-7195-46e0-81e7-fbbfda83a58b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.846991] env[62510]: DEBUG oslo_vmware.api [None req-a3d77851-01c5-4940-827e-f1f4ec1edba2 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Waiting for the task: (returnval){ [ 1564.846991] env[62510]: value = "task-1768658" [ 1564.846991] env[62510]: _type = "Task" [ 1564.846991] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.865713] env[62510]: DEBUG oslo_vmware.api [None req-a3d77851-01c5-4940-827e-f1f4ec1edba2 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': task-1768658, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.030390] env[62510]: DEBUG oslo_concurrency.lockutils [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.607s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1565.030797] env[62510]: DEBUG nova.compute.manager [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1565.033778] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0c302042-7aa8-4b32-a656-6eb23dd1977c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.545s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1565.034009] env[62510]: DEBUG nova.objects.instance [None req-0c302042-7aa8-4b32-a656-6eb23dd1977c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Lazy-loading 'resources' on Instance uuid c7d875ee-2b9c-48e4-9bf9-f7602e75ec62 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1565.092693] env[62510]: DEBUG oslo_vmware.api [None req-bc0748ed-c5bc-4d47-9137-6d3481e34650 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Task: {'id': task-1768656, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.166203} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.092969] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc0748ed-c5bc-4d47-9137-6d3481e34650 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1565.093273] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-bc0748ed-c5bc-4d47-9137-6d3481e34650 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1565.093572] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-bc0748ed-c5bc-4d47-9137-6d3481e34650 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1565.093861] env[62510]: INFO nova.compute.manager [None req-bc0748ed-c5bc-4d47-9137-6d3481e34650 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Took 2.08 seconds to destroy the instance on the hypervisor. [ 1565.094248] env[62510]: DEBUG oslo.service.loopingcall [None req-bc0748ed-c5bc-4d47-9137-6d3481e34650 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1565.094474] env[62510]: DEBUG nova.compute.manager [-] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1565.094573] env[62510]: DEBUG nova.network.neutron [-] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1565.250064] env[62510]: DEBUG oslo_vmware.api [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768657, 'name': ReconfigVM_Task, 'duration_secs': 0.313453} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.250064] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Reconfigured VM instance instance-0000002e to attach disk [datastore1] fae7e580-ab09-4fda-9cbe-0e066ddcb85c/fae7e580-ab09-4fda-9cbe-0e066ddcb85c.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1565.250064] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9a47f9ce-4c8e-4276-b739-fbd2338f425e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.256700] env[62510]: DEBUG oslo_vmware.api [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Waiting for the task: (returnval){ [ 1565.256700] env[62510]: value = "task-1768659" [ 1565.256700] env[62510]: _type = "Task" [ 1565.256700] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1565.272834] env[62510]: DEBUG oslo_vmware.api [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768659, 'name': Rename_Task} progress is 6%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.359166] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b0e03ac-c021-4aab-a239-76e96436ba5a tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "interface-0029d975-bd48-4558-9f41-a0cf91336393-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1565.359851] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b0e03ac-c021-4aab-a239-76e96436ba5a tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "interface-0029d975-bd48-4558-9f41-a0cf91336393-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1565.359851] env[62510]: DEBUG nova.objects.instance [None req-5b0e03ac-c021-4aab-a239-76e96436ba5a tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lazy-loading 'flavor' on Instance uuid 0029d975-bd48-4558-9f41-a0cf91336393 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1565.361682] env[62510]: DEBUG oslo_vmware.api [None req-a3d77851-01c5-4940-827e-f1f4ec1edba2 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': task-1768658, 'name': PowerOffVM_Task, 'duration_secs': 0.26139} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.362249] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3d77851-01c5-4940-827e-f1f4ec1edba2 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1565.362411] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a3d77851-01c5-4940-827e-f1f4ec1edba2 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1565.362727] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0044a1d3-5c3b-4a3a-9114-b4e8857bef51 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.480738] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a3d77851-01c5-4940-827e-f1f4ec1edba2 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1565.481111] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a3d77851-01c5-4940-827e-f1f4ec1edba2 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1565.481427] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3d77851-01c5-4940-827e-f1f4ec1edba2 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Deleting the datastore file [datastore1] 34a464e2-d38e-4c24-a487-c62a4f484667 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1565.481826] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-41ce8414-3dc2-4ac3-8058-baa8f949ce38 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.490243] env[62510]: DEBUG oslo_vmware.api [None req-a3d77851-01c5-4940-827e-f1f4ec1edba2 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Waiting for the task: (returnval){ [ 1565.490243] env[62510]: value = "task-1768661" [ 1565.490243] env[62510]: _type = "Task" [ 1565.490243] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1565.498404] env[62510]: DEBUG oslo_vmware.api [None req-a3d77851-01c5-4940-827e-f1f4ec1edba2 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': task-1768661, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.538582] env[62510]: DEBUG nova.compute.utils [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1565.543039] env[62510]: DEBUG nova.compute.manager [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1565.546069] env[62510]: DEBUG nova.network.neutron [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1565.597191] env[62510]: DEBUG nova.policy [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '84838efea173462a8afea9205f10814e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e3cc351804874ef8bbb055920e2e4582', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1565.718604] env[62510]: DEBUG nova.compute.manager [req-116cfade-3178-4502-8681-b24fab9a4530 req-eb08bb11-bcd2-4261-a849-e5d663dd13a3 service nova] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Received event network-vif-deleted-667e6924-4cc9-4a1a-b451-453ac0491b41 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1565.718781] env[62510]: INFO nova.compute.manager [req-116cfade-3178-4502-8681-b24fab9a4530 req-eb08bb11-bcd2-4261-a849-e5d663dd13a3 service nova] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Neutron deleted interface 667e6924-4cc9-4a1a-b451-453ac0491b41; detaching it from the instance and deleting it from the info cache [ 1565.719011] env[62510]: DEBUG nova.network.neutron [req-116cfade-3178-4502-8681-b24fab9a4530 req-eb08bb11-bcd2-4261-a849-e5d663dd13a3 service nova] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Updating instance_info_cache with network_info: [{"id": "fc653bf1-54d2-4e03-bb9f-b6486dd8ce76", "address": "fa:16:3e:ed:b1:3e", "network": {"id": "68e8cdd5-8257-4c44-ba46-30c907dc84a1", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-825081581", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.154", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "d33701c4eedd47268e1c8d16bd63de81", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c42bb08a-77b4-4bba-8166-702cbb1b5f1e", "external-id": "nsx-vlan-transportzone-137", "segmentation_id": 137, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc653bf1-54", "ovs_interfaceid": "fc653bf1-54d2-4e03-bb9f-b6486dd8ce76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1565.772386] env[62510]: DEBUG oslo_vmware.api [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 
tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768659, 'name': Rename_Task, 'duration_secs': 0.207329} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.772962] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1565.772962] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d21eb006-f5fc-44b3-b43b-0fefbb898b0b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.780609] env[62510]: DEBUG oslo_vmware.api [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Waiting for the task: (returnval){ [ 1565.780609] env[62510]: value = "task-1768662" [ 1565.780609] env[62510]: _type = "Task" [ 1565.780609] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1565.797378] env[62510]: DEBUG oslo_vmware.api [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768662, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.950744] env[62510]: DEBUG nova.objects.instance [None req-5b0e03ac-c021-4aab-a239-76e96436ba5a tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lazy-loading 'pci_requests' on Instance uuid 0029d975-bd48-4558-9f41-a0cf91336393 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1566.003575] env[62510]: DEBUG oslo_vmware.api [None req-a3d77851-01c5-4940-827e-f1f4ec1edba2 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Task: {'id': task-1768661, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.354409} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1566.003914] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3d77851-01c5-4940-827e-f1f4ec1edba2 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1566.004047] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a3d77851-01c5-4940-827e-f1f4ec1edba2 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1566.004238] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a3d77851-01c5-4940-827e-f1f4ec1edba2 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1566.004426] env[62510]: INFO nova.compute.manager [None req-a3d77851-01c5-4940-827e-f1f4ec1edba2 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1566.004682] env[62510]: DEBUG oslo.service.loopingcall [None req-a3d77851-01c5-4940-827e-f1f4ec1edba2 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1566.004899] env[62510]: DEBUG nova.compute.manager [-] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1566.004997] env[62510]: DEBUG nova.network.neutron [-] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1566.009030] env[62510]: DEBUG nova.network.neutron [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Successfully created port: e1df700d-6a97-4814-9a7d-e381d485b8b4 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1566.042237] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-092fe22e-31da-4b7d-a33d-1e58ff7f6b39 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.046075] env[62510]: DEBUG nova.compute.manager [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Start building block device mappings for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1566.055599] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f573a34e-c1a1-4b59-a338-70d12c404176 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.090018] env[62510]: DEBUG nova.network.neutron [-] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1566.093326] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c31dbe90-d648-4dc9-b423-4dbb5d8d9e41 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.102644] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7251b565-eeeb-4c74-8532-2da7b782d040 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.120464] env[62510]: DEBUG nova.compute.provider_tree [None req-0c302042-7aa8-4b32-a656-6eb23dd1977c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1566.231123] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d83c11a7-e45b-419c-9e55-3a88a0b7cd5e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.241916] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b41d43de-c6f3-4b8d-9c6f-d915676e1d39 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.290572] env[62510]: DEBUG nova.compute.manager [req-116cfade-3178-4502-8681-b24fab9a4530 req-eb08bb11-bcd2-4261-a849-e5d663dd13a3 service nova] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Detach interface failed, port_id=667e6924-4cc9-4a1a-b451-453ac0491b41, reason: Instance 87d1d75e-41c4-42e6-bf58-deabb71400e1 could not be found. {{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1566.300319] env[62510]: DEBUG oslo_vmware.api [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768662, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.453327] env[62510]: DEBUG nova.objects.base [None req-5b0e03ac-c021-4aab-a239-76e96436ba5a tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Object Instance<0029d975-bd48-4558-9f41-a0cf91336393> lazy-loaded attributes: flavor,pci_requests {{(pid=62510) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1566.453557] env[62510]: DEBUG nova.network.neutron [None req-5b0e03ac-c021-4aab-a239-76e96436ba5a tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1566.494819] env[62510]: DEBUG nova.policy [None req-5b0e03ac-c021-4aab-a239-76e96436ba5a tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '112bb5174a71476f9aaa66e917fc135a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cca414b18f8d431786c155d359f1325d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1566.553769] env[62510]: INFO nova.virt.block_device [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Booting with volume b09085d6-32c6-49d0-a8fe-b4eb76d80b62 at /dev/sda [ 1566.591035] env[62510]: INFO nova.compute.manager [-] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Took 1.50 seconds to deallocate network for instance. 
[ 1566.596615] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-66a1c72b-ce7b-4d28-8ccb-a754bbae75b0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.607913] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59fd5aa2-79ab-4b46-946e-161ba1e8c904 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.625158] env[62510]: DEBUG nova.scheduler.client.report [None req-0c302042-7aa8-4b32-a656-6eb23dd1977c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1566.644800] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-701c40f8-89a5-419d-bfbe-5ce9b572bfe2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.654495] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-470608cf-d75f-4233-b59f-56fda95b3acb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.694496] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5dc6e61-4eef-4949-8f47-a346ce4a51a8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.703076] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8be6d144-3573-4da6-94f7-7edbe1ce8636 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.719731] env[62510]: DEBUG nova.virt.block_device [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Updating existing volume attachment record: 918ea018-7a56-4282-bb05-45d1af5270d1 {{(pid=62510) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1566.789372] env[62510]: DEBUG nova.network.neutron [None req-5b0e03ac-c021-4aab-a239-76e96436ba5a tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Successfully created port: 37fc170f-1d32-4c6f-b871-74d459e02d76 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1566.801880] env[62510]: DEBUG oslo_vmware.api [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768662, 'name': PowerOnVM_Task, 'duration_secs': 0.672404} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1566.802363] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1566.802609] env[62510]: INFO nova.compute.manager [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Took 9.00 seconds to spawn the instance on the hypervisor. [ 1566.803139] env[62510]: DEBUG nova.compute.manager [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1566.803707] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfbbf814-ae65-4d0e-9cf3-217d90574152 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.816890] env[62510]: DEBUG nova.network.neutron [-] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1567.100587] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bc0748ed-c5bc-4d47-9137-6d3481e34650 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1567.130126] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0c302042-7aa8-4b32-a656-6eb23dd1977c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.096s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1567.132615] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.028s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1567.134797] env[62510]: INFO nova.compute.claims [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1567.156421] env[62510]: INFO nova.scheduler.client.report [None req-0c302042-7aa8-4b32-a656-6eb23dd1977c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Deleted allocations for instance c7d875ee-2b9c-48e4-9bf9-f7602e75ec62 [ 1567.320446] env[62510]: INFO nova.compute.manager [-] [instance: 
34a464e2-d38e-4c24-a487-c62a4f484667] Took 1.32 seconds to deallocate network for instance. [ 1567.326479] env[62510]: INFO nova.compute.manager [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Took 39.67 seconds to build instance. [ 1567.665358] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0c302042-7aa8-4b32-a656-6eb23dd1977c tempest-ListServerFiltersTestJSON-608937213 tempest-ListServerFiltersTestJSON-608937213-project-member] Lock "c7d875ee-2b9c-48e4-9bf9-f7602e75ec62" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.858s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1567.738024] env[62510]: DEBUG nova.network.neutron [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Successfully updated port: e1df700d-6a97-4814-9a7d-e381d485b8b4 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1567.803336] env[62510]: DEBUG nova.compute.manager [req-ede09854-583b-4cc3-8fc1-19693251d567 req-62158f87-aa49-45c1-8372-a469cde7b5c3 service nova] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Received event network-vif-plugged-e1df700d-6a97-4814-9a7d-e381d485b8b4 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1567.803336] env[62510]: DEBUG oslo_concurrency.lockutils [req-ede09854-583b-4cc3-8fc1-19693251d567 req-62158f87-aa49-45c1-8372-a469cde7b5c3 service nova] Acquiring lock "568ce58c-9ce5-4b40-988f-f31d8e0c376d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1567.803566] env[62510]: DEBUG oslo_concurrency.lockutils [req-ede09854-583b-4cc3-8fc1-19693251d567 req-62158f87-aa49-45c1-8372-a469cde7b5c3 service nova] Lock "568ce58c-9ce5-4b40-988f-f31d8e0c376d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1567.803566] env[62510]: DEBUG oslo_concurrency.lockutils [req-ede09854-583b-4cc3-8fc1-19693251d567 req-62158f87-aa49-45c1-8372-a469cde7b5c3 service nova] Lock "568ce58c-9ce5-4b40-988f-f31d8e0c376d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1567.803734] env[62510]: DEBUG nova.compute.manager [req-ede09854-583b-4cc3-8fc1-19693251d567 req-62158f87-aa49-45c1-8372-a469cde7b5c3 service nova] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] No waiting events found dispatching network-vif-plugged-e1df700d-6a97-4814-9a7d-e381d485b8b4 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1567.803888] env[62510]: WARNING nova.compute.manager [req-ede09854-583b-4cc3-8fc1-19693251d567 req-62158f87-aa49-45c1-8372-a469cde7b5c3 service nova] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Received unexpected event network-vif-plugged-e1df700d-6a97-4814-9a7d-e381d485b8b4 for instance with vm_state building and task_state block_device_mapping. 
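The repeated "Acquiring lock … / Lock … acquired … waited Ns / Lock … released … held Ns" DEBUG lines above are emitted by oslo.concurrency's lock wrapper around each critical section (the inner() frames in lockutils.py). A minimal sketch of the calling pattern that produces such lines, assuming the standard `lockutils.synchronized` decorator and `lockutils.lock` context manager; the function and lock names below are illustrative, not Nova's own code:

    from oslo_concurrency import lockutils

    # Decorator form: every call is serialized on the named lock, and the
    # wrapper logs the acquire / waited / held / released DEBUG messages.
    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass  # critical section: resource-tracker style bookkeeping

    # Context-manager form: a per-instance lock, e.g. around event dispatch.
    def pop_instance_event(instance_uuid):
        with lockutils.lock('%s-events' % instance_uuid):
            pass  # look up and pop any waiting event for this instance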
[ 1567.829545] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a3d77851-01c5-4940-827e-f1f4ec1edba2 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1567.829943] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e94d6fcf-fc35-4f34-97f9-5f98092fe484 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Lock "fae7e580-ab09-4fda-9cbe-0e066ddcb85c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.947s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1567.986701] env[62510]: DEBUG nova.compute.manager [req-efca5734-3d31-40c0-bc7a-655ebf1c6ab5 req-37f8c35c-a9d3-44d9-b1d7-ab8630058406 service nova] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Received event network-vif-deleted-fc653bf1-54d2-4e03-bb9f-b6486dd8ce76 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1567.986924] env[62510]: DEBUG nova.compute.manager [req-efca5734-3d31-40c0-bc7a-655ebf1c6ab5 req-37f8c35c-a9d3-44d9-b1d7-ab8630058406 service nova] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Received event network-vif-deleted-dba71e12-5d92-43ea-ba7a-3e8b1fc8d6cb {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1568.241952] env[62510]: DEBUG oslo_concurrency.lockutils [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Acquiring lock "refresh_cache-568ce58c-9ce5-4b40-988f-f31d8e0c376d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1568.242125] env[62510]: DEBUG oslo_concurrency.lockutils [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Acquired lock "refresh_cache-568ce58c-9ce5-4b40-988f-f31d8e0c376d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1568.242305] env[62510]: DEBUG nova.network.neutron [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1568.334446] env[62510]: DEBUG nova.compute.manager [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1568.394990] env[62510]: DEBUG nova.network.neutron [None req-5b0e03ac-c021-4aab-a239-76e96436ba5a tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Successfully updated port: 37fc170f-1d32-4c6f-b871-74d459e02d76 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1568.650502] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5015235-356c-46c4-b8aa-fb274f02773a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.659952] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08e9fb51-218f-4ae0-a8e5-8a635ba940a9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.691678] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caf988f2-4172-486b-a152-ceffdc12bb3e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.704973] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99e19fcc-9f88-4342-8f19-8f1339fbf100 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.721027] env[62510]: DEBUG nova.compute.provider_tree [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1568.782456] env[62510]: DEBUG nova.network.neutron [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1568.838394] env[62510]: DEBUG nova.compute.manager [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1568.838940] env[62510]: DEBUG nova.virt.hardware [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1568.839241] env[62510]: DEBUG nova.virt.hardware [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1568.839522] env[62510]: DEBUG nova.virt.hardware [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1568.839522] env[62510]: DEBUG nova.virt.hardware [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1568.839661] env[62510]: DEBUG nova.virt.hardware [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1568.839798] env[62510]: DEBUG nova.virt.hardware [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1568.840107] env[62510]: DEBUG nova.virt.hardware [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1568.840151] env[62510]: DEBUG nova.virt.hardware [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1568.840317] env[62510]: DEBUG nova.virt.hardware [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 
tempest-ServersTestBootFromVolume-1003948680-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1568.840477] env[62510]: DEBUG nova.virt.hardware [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1568.840672] env[62510]: DEBUG nova.virt.hardware [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1568.843631] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5920c53-9eb8-4e75-9327-50f9caccd106 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.854794] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0f19bf5-1cec-46ab-8a09-1c2516ffa26e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.859778] env[62510]: DEBUG oslo_concurrency.lockutils [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1568.898218] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b0e03ac-c021-4aab-a239-76e96436ba5a tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "refresh_cache-0029d975-bd48-4558-9f41-a0cf91336393" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1568.898394] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b0e03ac-c021-4aab-a239-76e96436ba5a tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquired lock "refresh_cache-0029d975-bd48-4558-9f41-a0cf91336393" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1568.898594] env[62510]: DEBUG nova.network.neutron [None req-5b0e03ac-c021-4aab-a239-76e96436ba5a tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1568.953440] env[62510]: DEBUG nova.network.neutron [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Updating instance_info_cache with network_info: [{"id": "e1df700d-6a97-4814-9a7d-e381d485b8b4", "address": "fa:16:3e:10:0d:4e", "network": {"id": "b210d504-9dc4-4f72-a51a-5a6f07287dfd", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-203898856-network", "subnets": [{"cidr": "192.168.128.0/28", 
"dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3cc351804874ef8bbb055920e2e4582", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bfae3ef8-cae7-455d-8632-ba93e1671625", "external-id": "cl2-zone-841", "segmentation_id": 841, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1df700d-6a", "ovs_interfaceid": "e1df700d-6a97-4814-9a7d-e381d485b8b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1569.224420] env[62510]: DEBUG nova.scheduler.client.report [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1569.433653] env[62510]: WARNING nova.network.neutron [None req-5b0e03ac-c021-4aab-a239-76e96436ba5a tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] 940be04f-b555-4383-aaf8-63734d94a773 already exists in list: networks containing: ['940be04f-b555-4383-aaf8-63734d94a773']. 
ignoring it [ 1569.458136] env[62510]: DEBUG oslo_concurrency.lockutils [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Releasing lock "refresh_cache-568ce58c-9ce5-4b40-988f-f31d8e0c376d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1569.458136] env[62510]: DEBUG nova.compute.manager [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Instance network_info: |[{"id": "e1df700d-6a97-4814-9a7d-e381d485b8b4", "address": "fa:16:3e:10:0d:4e", "network": {"id": "b210d504-9dc4-4f72-a51a-5a6f07287dfd", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-203898856-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3cc351804874ef8bbb055920e2e4582", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bfae3ef8-cae7-455d-8632-ba93e1671625", "external-id": "cl2-zone-841", "segmentation_id": 841, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1df700d-6a", "ovs_interfaceid": "e1df700d-6a97-4814-9a7d-e381d485b8b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1569.458136] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:10:0d:4e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bfae3ef8-cae7-455d-8632-ba93e1671625', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e1df700d-6a97-4814-9a7d-e381d485b8b4', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1569.465712] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Creating folder: Project (e3cc351804874ef8bbb055920e2e4582). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1569.465869] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-99b3d1eb-ba59-4177-82fb-10257f0e1fdb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.483252] env[62510]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. 
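The Folder.CreateFolder call just invoked comes back as a SOAP fault (reported by suds above), which oslo.vmware maps to a DuplicateName exception; the driver treats that as "folder already exists" rather than an error, as the lines that follow show. A rough sketch of that pattern, assuming an established oslo.vmware VMwareAPISession (`session`), a parent Folder managed-object reference (`parent_ref`), and that the fault is surfaced as `exceptions.DuplicateName` (all names here are illustrative):

    from oslo_vmware import exceptions as vexc

    def ensure_folder(session, parent_ref, name):
        try:
            # Folder.CreateFolder returns the managed-object ref of the new folder.
            return session.invoke_api(session.vim, 'CreateFolder',
                                      parent_ref, name=name)
        except vexc.DuplicateName:
            # vCenter reported the name as a duplicate: the folder already
            # exists, so a real caller would re-read the parent's children to
            # find it instead of failing the build (lookup elided here to keep
            # the sketch self-contained).
            return None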
[ 1569.483252] env[62510]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62510) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1569.483441] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Folder already exists: Project (e3cc351804874ef8bbb055920e2e4582). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1569.483673] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Creating folder: Instances. Parent ref: group-v367269. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1569.483926] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-02019896-01a7-49b5-86de-5b6c1bc32af2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.497913] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Created folder: Instances in parent group-v367269. [ 1569.498172] env[62510]: DEBUG oslo.service.loopingcall [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1569.498399] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1569.498617] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dede2657-79e8-41cb-8f58-bd8dcded0a59 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.520217] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1569.520217] env[62510]: value = "task-1768665" [ 1569.520217] env[62510]: _type = "Task" [ 1569.520217] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.531225] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768665, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.730431] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.598s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1569.730949] env[62510]: DEBUG nova.compute.manager [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1569.733583] env[62510]: DEBUG oslo_concurrency.lockutils [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.468s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1569.735413] env[62510]: INFO nova.compute.claims [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1569.741036] env[62510]: DEBUG nova.network.neutron [None req-5b0e03ac-c021-4aab-a239-76e96436ba5a tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Updating instance_info_cache with network_info: [{"id": "a003d1ad-b7fa-4edc-a654-9a89e9533cbd", "address": "fa:16:3e:b2:5f:4c", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.139", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa003d1ad-b7", "ovs_interfaceid": "a003d1ad-b7fa-4edc-a654-9a89e9533cbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "37fc170f-1d32-4c6f-b871-74d459e02d76", "address": "fa:16:3e:63:ae:97", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": 
"192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap37fc170f-1d", "ovs_interfaceid": "37fc170f-1d32-4c6f-b871-74d459e02d76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1569.855705] env[62510]: DEBUG nova.compute.manager [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Stashing vm_state: active {{(pid=62510) _prep_resize /opt/stack/nova/nova/compute/manager.py:5998}} [ 1569.951489] env[62510]: DEBUG nova.compute.manager [req-2dfcaabf-b890-47ca-8c91-24883c26e356 req-eed7e84b-74ce-4899-a22e-fb7ae059a314 service nova] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Received event network-changed-e1df700d-6a97-4814-9a7d-e381d485b8b4 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1569.951770] env[62510]: DEBUG nova.compute.manager [req-2dfcaabf-b890-47ca-8c91-24883c26e356 req-eed7e84b-74ce-4899-a22e-fb7ae059a314 service nova] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Refreshing instance network info cache due to event network-changed-e1df700d-6a97-4814-9a7d-e381d485b8b4. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1569.951988] env[62510]: DEBUG oslo_concurrency.lockutils [req-2dfcaabf-b890-47ca-8c91-24883c26e356 req-eed7e84b-74ce-4899-a22e-fb7ae059a314 service nova] Acquiring lock "refresh_cache-568ce58c-9ce5-4b40-988f-f31d8e0c376d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1569.952147] env[62510]: DEBUG oslo_concurrency.lockutils [req-2dfcaabf-b890-47ca-8c91-24883c26e356 req-eed7e84b-74ce-4899-a22e-fb7ae059a314 service nova] Acquired lock "refresh_cache-568ce58c-9ce5-4b40-988f-f31d8e0c376d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1569.952308] env[62510]: DEBUG nova.network.neutron [req-2dfcaabf-b890-47ca-8c91-24883c26e356 req-eed7e84b-74ce-4899-a22e-fb7ae059a314 service nova] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Refreshing network info cache for port e1df700d-6a97-4814-9a7d-e381d485b8b4 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1570.030354] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768665, 'name': CreateVM_Task, 'duration_secs': 0.387405} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1570.030529] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1570.031221] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367276', 'volume_id': 'b09085d6-32c6-49d0-a8fe-b4eb76d80b62', 'name': 'volume-b09085d6-32c6-49d0-a8fe-b4eb76d80b62', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '568ce58c-9ce5-4b40-988f-f31d8e0c376d', 'attached_at': '', 'detached_at': '', 'volume_id': 'b09085d6-32c6-49d0-a8fe-b4eb76d80b62', 'serial': 'b09085d6-32c6-49d0-a8fe-b4eb76d80b62'}, 'attachment_id': '918ea018-7a56-4282-bb05-45d1af5270d1', 'mount_device': '/dev/sda', 'device_type': None, 'disk_bus': None, 'guest_format': None, 'boot_index': 0, 'delete_on_termination': True, 'volume_type': None}], 'swap': None} {{(pid=62510) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1570.032068] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Root volume attach. Driver type: vmdk {{(pid=62510) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1570.033038] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c846f6fa-6efd-4b58-b3b5-7dd019685c40 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.040162] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-801a6c68-b67f-4abe-9b25-7f7f07c5579d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.047025] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60f98ec8-65aa-41dc-9e0a-c34b36aed22c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.052577] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-1fe755d0-a0fe-4817-9528-9072bba0c1f2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.060191] env[62510]: DEBUG oslo_vmware.api [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Waiting for the task: (returnval){ [ 1570.060191] env[62510]: value = "task-1768666" [ 1570.060191] env[62510]: _type = "Task" [ 1570.060191] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1570.068113] env[62510]: DEBUG oslo_vmware.api [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Task: {'id': task-1768666, 'name': RelocateVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.156447] env[62510]: DEBUG nova.compute.manager [req-e076f765-0f11-4970-8321-29b6d7d4799c req-87db46f8-9420-41d8-91b9-55540e9b509b service nova] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Received event network-vif-plugged-37fc170f-1d32-4c6f-b871-74d459e02d76 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1570.156447] env[62510]: DEBUG oslo_concurrency.lockutils [req-e076f765-0f11-4970-8321-29b6d7d4799c req-87db46f8-9420-41d8-91b9-55540e9b509b service nova] Acquiring lock "0029d975-bd48-4558-9f41-a0cf91336393-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1570.156659] env[62510]: DEBUG oslo_concurrency.lockutils [req-e076f765-0f11-4970-8321-29b6d7d4799c req-87db46f8-9420-41d8-91b9-55540e9b509b service nova] Lock "0029d975-bd48-4558-9f41-a0cf91336393-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1570.156834] env[62510]: DEBUG oslo_concurrency.lockutils [req-e076f765-0f11-4970-8321-29b6d7d4799c req-87db46f8-9420-41d8-91b9-55540e9b509b service nova] Lock "0029d975-bd48-4558-9f41-a0cf91336393-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1570.157012] env[62510]: DEBUG nova.compute.manager [req-e076f765-0f11-4970-8321-29b6d7d4799c req-87db46f8-9420-41d8-91b9-55540e9b509b service nova] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] No waiting events found dispatching network-vif-plugged-37fc170f-1d32-4c6f-b871-74d459e02d76 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1570.157217] env[62510]: WARNING nova.compute.manager [req-e076f765-0f11-4970-8321-29b6d7d4799c req-87db46f8-9420-41d8-91b9-55540e9b509b service nova] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Received unexpected event network-vif-plugged-37fc170f-1d32-4c6f-b871-74d459e02d76 for instance with vm_state active and task_state None. [ 1570.157393] env[62510]: DEBUG nova.compute.manager [req-e076f765-0f11-4970-8321-29b6d7d4799c req-87db46f8-9420-41d8-91b9-55540e9b509b service nova] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Received event network-changed-37fc170f-1d32-4c6f-b871-74d459e02d76 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1570.157572] env[62510]: DEBUG nova.compute.manager [req-e076f765-0f11-4970-8321-29b6d7d4799c req-87db46f8-9420-41d8-91b9-55540e9b509b service nova] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Refreshing instance network info cache due to event network-changed-37fc170f-1d32-4c6f-b871-74d459e02d76. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1570.157743] env[62510]: DEBUG oslo_concurrency.lockutils [req-e076f765-0f11-4970-8321-29b6d7d4799c req-87db46f8-9420-41d8-91b9-55540e9b509b service nova] Acquiring lock "refresh_cache-0029d975-bd48-4558-9f41-a0cf91336393" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1570.241371] env[62510]: DEBUG nova.compute.utils [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1570.244809] env[62510]: DEBUG nova.compute.manager [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1570.244988] env[62510]: DEBUG nova.network.neutron [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1570.247028] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b0e03ac-c021-4aab-a239-76e96436ba5a tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Releasing lock "refresh_cache-0029d975-bd48-4558-9f41-a0cf91336393" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1570.247571] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b0e03ac-c021-4aab-a239-76e96436ba5a tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "0029d975-bd48-4558-9f41-a0cf91336393" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1570.247729] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b0e03ac-c021-4aab-a239-76e96436ba5a tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquired lock "0029d975-bd48-4558-9f41-a0cf91336393" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1570.248196] env[62510]: DEBUG oslo_concurrency.lockutils [req-e076f765-0f11-4970-8321-29b6d7d4799c req-87db46f8-9420-41d8-91b9-55540e9b509b service nova] Acquired lock "refresh_cache-0029d975-bd48-4558-9f41-a0cf91336393" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1570.248374] env[62510]: DEBUG nova.network.neutron [req-e076f765-0f11-4970-8321-29b6d7d4799c req-87db46f8-9420-41d8-91b9-55540e9b509b service nova] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Refreshing network info cache for port 37fc170f-1d32-4c6f-b871-74d459e02d76 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1570.249942] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54d7f44b-40d6-426b-b3f1-a8d5a9f3483c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.272245] 
env[62510]: DEBUG nova.virt.hardware [None req-5b0e03ac-c021-4aab-a239-76e96436ba5a tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1570.272245] env[62510]: DEBUG nova.virt.hardware [None req-5b0e03ac-c021-4aab-a239-76e96436ba5a tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1570.272245] env[62510]: DEBUG nova.virt.hardware [None req-5b0e03ac-c021-4aab-a239-76e96436ba5a tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1570.272442] env[62510]: DEBUG nova.virt.hardware [None req-5b0e03ac-c021-4aab-a239-76e96436ba5a tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1570.273452] env[62510]: DEBUG nova.virt.hardware [None req-5b0e03ac-c021-4aab-a239-76e96436ba5a tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1570.273452] env[62510]: DEBUG nova.virt.hardware [None req-5b0e03ac-c021-4aab-a239-76e96436ba5a tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1570.273452] env[62510]: DEBUG nova.virt.hardware [None req-5b0e03ac-c021-4aab-a239-76e96436ba5a tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1570.273452] env[62510]: DEBUG nova.virt.hardware [None req-5b0e03ac-c021-4aab-a239-76e96436ba5a tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1570.273452] env[62510]: DEBUG nova.virt.hardware [None req-5b0e03ac-c021-4aab-a239-76e96436ba5a tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1570.273452] env[62510]: DEBUG 
nova.virt.hardware [None req-5b0e03ac-c021-4aab-a239-76e96436ba5a tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1570.273669] env[62510]: DEBUG nova.virt.hardware [None req-5b0e03ac-c021-4aab-a239-76e96436ba5a tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1570.280246] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5b0e03ac-c021-4aab-a239-76e96436ba5a tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Reconfiguring VM to attach interface {{(pid=62510) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1570.280828] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3c025185-99ae-4787-acd4-79da9ddb4218 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.295226] env[62510]: DEBUG nova.policy [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '444e79fb7f11488b8148a7329bbc6823', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5ca23491c4194bee84d0e9be0b015342', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1570.303392] env[62510]: DEBUG oslo_vmware.api [None req-5b0e03ac-c021-4aab-a239-76e96436ba5a tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1570.303392] env[62510]: value = "task-1768667" [ 1570.303392] env[62510]: _type = "Task" [ 1570.303392] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1570.312462] env[62510]: DEBUG oslo_vmware.api [None req-5b0e03ac-c021-4aab-a239-76e96436ba5a tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768667, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.384856] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1570.571278] env[62510]: DEBUG oslo_vmware.api [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Task: {'id': task-1768666, 'name': RelocateVM_Task, 'duration_secs': 0.405752} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1570.573596] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Volume attach. Driver type: vmdk {{(pid=62510) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1570.573806] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367276', 'volume_id': 'b09085d6-32c6-49d0-a8fe-b4eb76d80b62', 'name': 'volume-b09085d6-32c6-49d0-a8fe-b4eb76d80b62', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '568ce58c-9ce5-4b40-988f-f31d8e0c376d', 'attached_at': '', 'detached_at': '', 'volume_id': 'b09085d6-32c6-49d0-a8fe-b4eb76d80b62', 'serial': 'b09085d6-32c6-49d0-a8fe-b4eb76d80b62'} {{(pid=62510) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1570.574676] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0da663c9-e028-4e0b-a077-4a6805936880 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.594046] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dee9a27-1155-4fb0-aca3-dbe2d10bd024 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.617442] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] volume-b09085d6-32c6-49d0-a8fe-b4eb76d80b62/volume-b09085d6-32c6-49d0-a8fe-b4eb76d80b62.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1570.617791] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9acab378-f1b7-4ac9-90f7-02877a022ba7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.632804] env[62510]: DEBUG 
nova.network.neutron [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Successfully created port: d769f744-6168-49b4-b195-b608fec386e8 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1570.642477] env[62510]: DEBUG oslo_vmware.api [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Waiting for the task: (returnval){ [ 1570.642477] env[62510]: value = "task-1768668" [ 1570.642477] env[62510]: _type = "Task" [ 1570.642477] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1570.651114] env[62510]: DEBUG oslo_vmware.api [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Task: {'id': task-1768668, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.746368] env[62510]: DEBUG nova.compute.manager [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1570.759932] env[62510]: DEBUG nova.network.neutron [req-2dfcaabf-b890-47ca-8c91-24883c26e356 req-eed7e84b-74ce-4899-a22e-fb7ae059a314 service nova] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Updated VIF entry in instance network info cache for port e1df700d-6a97-4814-9a7d-e381d485b8b4. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1570.760303] env[62510]: DEBUG nova.network.neutron [req-2dfcaabf-b890-47ca-8c91-24883c26e356 req-eed7e84b-74ce-4899-a22e-fb7ae059a314 service nova] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Updating instance_info_cache with network_info: [{"id": "e1df700d-6a97-4814-9a7d-e381d485b8b4", "address": "fa:16:3e:10:0d:4e", "network": {"id": "b210d504-9dc4-4f72-a51a-5a6f07287dfd", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-203898856-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3cc351804874ef8bbb055920e2e4582", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bfae3ef8-cae7-455d-8632-ba93e1671625", "external-id": "cl2-zone-841", "segmentation_id": 841, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1df700d-6a", "ovs_interfaceid": "e1df700d-6a97-4814-9a7d-e381d485b8b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1570.819825] env[62510]: DEBUG oslo_vmware.api [None req-5b0e03ac-c021-4aab-a239-76e96436ba5a tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768667, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.051795] env[62510]: DEBUG nova.network.neutron [req-e076f765-0f11-4970-8321-29b6d7d4799c req-87db46f8-9420-41d8-91b9-55540e9b509b service nova] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Updated VIF entry in instance network info cache for port 37fc170f-1d32-4c6f-b871-74d459e02d76. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1571.052257] env[62510]: DEBUG nova.network.neutron [req-e076f765-0f11-4970-8321-29b6d7d4799c req-87db46f8-9420-41d8-91b9-55540e9b509b service nova] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Updating instance_info_cache with network_info: [{"id": "a003d1ad-b7fa-4edc-a654-9a89e9533cbd", "address": "fa:16:3e:b2:5f:4c", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.139", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa003d1ad-b7", "ovs_interfaceid": "a003d1ad-b7fa-4edc-a654-9a89e9533cbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "37fc170f-1d32-4c6f-b871-74d459e02d76", "address": "fa:16:3e:63:ae:97", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap37fc170f-1d", "ovs_interfaceid": "37fc170f-1d32-4c6f-b871-74d459e02d76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1571.156125] env[62510]: DEBUG oslo_vmware.api [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Task: {'id': task-1768668, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.245380] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3557bfa-0bfd-40c0-89ff-522d74719400 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.253621] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2207524b-7c3b-444d-8e89-265531a2b7fc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.262551] env[62510]: DEBUG oslo_concurrency.lockutils [req-2dfcaabf-b890-47ca-8c91-24883c26e356 req-eed7e84b-74ce-4899-a22e-fb7ae059a314 service nova] Releasing lock "refresh_cache-568ce58c-9ce5-4b40-988f-f31d8e0c376d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1571.293634] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-109cc444-9757-4251-9531-ab1f72c99448 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.302908] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-febb0527-0296-41ee-943e-1ffc853c0629 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.315811] env[62510]: DEBUG oslo_vmware.api [None req-5b0e03ac-c021-4aab-a239-76e96436ba5a tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768667, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.325499] env[62510]: DEBUG nova.compute.provider_tree [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1571.557282] env[62510]: DEBUG oslo_concurrency.lockutils [req-e076f765-0f11-4970-8321-29b6d7d4799c req-87db46f8-9420-41d8-91b9-55540e9b509b service nova] Releasing lock "refresh_cache-0029d975-bd48-4558-9f41-a0cf91336393" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1571.657180] env[62510]: DEBUG oslo_vmware.api [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Task: {'id': task-1768668, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.763425] env[62510]: DEBUG nova.compute.manager [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1571.788935] env[62510]: DEBUG nova.virt.hardware [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1571.789066] env[62510]: DEBUG nova.virt.hardware [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1571.789238] env[62510]: DEBUG nova.virt.hardware [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1571.789426] env[62510]: DEBUG nova.virt.hardware [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1571.789573] env[62510]: DEBUG nova.virt.hardware [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1571.789771] env[62510]: DEBUG nova.virt.hardware [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1571.790048] env[62510]: DEBUG nova.virt.hardware [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1571.790225] env[62510]: DEBUG nova.virt.hardware [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1571.790404] env[62510]: DEBUG 
nova.virt.hardware [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1571.790566] env[62510]: DEBUG nova.virt.hardware [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1571.790737] env[62510]: DEBUG nova.virt.hardware [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1571.792014] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7942652-26d8-49ea-8649-698748d8a8bb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.801110] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cf282d7-cfac-445f-a8e5-04f9a41f023e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.823969] env[62510]: DEBUG oslo_vmware.api [None req-5b0e03ac-c021-4aab-a239-76e96436ba5a tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768667, 'name': ReconfigVM_Task, 'duration_secs': 1.172573} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.824475] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b0e03ac-c021-4aab-a239-76e96436ba5a tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Releasing lock "0029d975-bd48-4558-9f41-a0cf91336393" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1571.824693] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5b0e03ac-c021-4aab-a239-76e96436ba5a tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Reconfigured VM to attach interface {{(pid=62510) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1571.830975] env[62510]: DEBUG nova.scheduler.client.report [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1572.137991] env[62510]: DEBUG nova.network.neutron [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Successfully updated port: d769f744-6168-49b4-b195-b608fec386e8 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1572.158369] env[62510]: DEBUG oslo_vmware.api [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Task: {'id': task-1768668, 'name': ReconfigVM_Task, 'duration_secs': 1.346239} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1572.158572] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Reconfigured VM instance instance-0000002f to attach disk [datastore1] volume-b09085d6-32c6-49d0-a8fe-b4eb76d80b62/volume-b09085d6-32c6-49d0-a8fe-b4eb76d80b62.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1572.163542] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b7bc2ecf-e7cc-4629-aabb-7fbc1ade13b0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.180899] env[62510]: DEBUG oslo_vmware.api [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Waiting for the task: (returnval){ [ 1572.180899] env[62510]: value = "task-1768669" [ 1572.180899] env[62510]: _type = "Task" [ 1572.180899] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.189708] env[62510]: DEBUG oslo_vmware.api [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Task: {'id': task-1768669, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.210995] env[62510]: DEBUG nova.compute.manager [req-710f60b1-e278-4591-8ae2-9e3dc2f947bf req-1ba6438e-b453-4e5c-9bf5-c70dbef33138 service nova] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Received event network-vif-plugged-d769f744-6168-49b4-b195-b608fec386e8 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1572.211187] env[62510]: DEBUG oslo_concurrency.lockutils [req-710f60b1-e278-4591-8ae2-9e3dc2f947bf req-1ba6438e-b453-4e5c-9bf5-c70dbef33138 service nova] Acquiring lock "e3850272-9dae-4164-8f0e-f5513af23f49-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1572.211431] env[62510]: DEBUG oslo_concurrency.lockutils [req-710f60b1-e278-4591-8ae2-9e3dc2f947bf req-1ba6438e-b453-4e5c-9bf5-c70dbef33138 service nova] Lock "e3850272-9dae-4164-8f0e-f5513af23f49-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1572.211609] env[62510]: DEBUG oslo_concurrency.lockutils [req-710f60b1-e278-4591-8ae2-9e3dc2f947bf req-1ba6438e-b453-4e5c-9bf5-c70dbef33138 service nova] Lock "e3850272-9dae-4164-8f0e-f5513af23f49-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1572.211797] env[62510]: DEBUG nova.compute.manager [req-710f60b1-e278-4591-8ae2-9e3dc2f947bf req-1ba6438e-b453-4e5c-9bf5-c70dbef33138 service nova] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] No waiting events 
found dispatching network-vif-plugged-d769f744-6168-49b4-b195-b608fec386e8 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1572.211979] env[62510]: WARNING nova.compute.manager [req-710f60b1-e278-4591-8ae2-9e3dc2f947bf req-1ba6438e-b453-4e5c-9bf5-c70dbef33138 service nova] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Received unexpected event network-vif-plugged-d769f744-6168-49b4-b195-b608fec386e8 for instance with vm_state building and task_state spawning. [ 1572.212161] env[62510]: DEBUG nova.compute.manager [req-710f60b1-e278-4591-8ae2-9e3dc2f947bf req-1ba6438e-b453-4e5c-9bf5-c70dbef33138 service nova] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Received event network-changed-d769f744-6168-49b4-b195-b608fec386e8 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1572.212330] env[62510]: DEBUG nova.compute.manager [req-710f60b1-e278-4591-8ae2-9e3dc2f947bf req-1ba6438e-b453-4e5c-9bf5-c70dbef33138 service nova] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Refreshing instance network info cache due to event network-changed-d769f744-6168-49b4-b195-b608fec386e8. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1572.213026] env[62510]: DEBUG oslo_concurrency.lockutils [req-710f60b1-e278-4591-8ae2-9e3dc2f947bf req-1ba6438e-b453-4e5c-9bf5-c70dbef33138 service nova] Acquiring lock "refresh_cache-e3850272-9dae-4164-8f0e-f5513af23f49" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1572.213026] env[62510]: DEBUG oslo_concurrency.lockutils [req-710f60b1-e278-4591-8ae2-9e3dc2f947bf req-1ba6438e-b453-4e5c-9bf5-c70dbef33138 service nova] Acquired lock "refresh_cache-e3850272-9dae-4164-8f0e-f5513af23f49" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1572.213026] env[62510]: DEBUG nova.network.neutron [req-710f60b1-e278-4591-8ae2-9e3dc2f947bf req-1ba6438e-b453-4e5c-9bf5-c70dbef33138 service nova] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Refreshing network info cache for port d769f744-6168-49b4-b195-b608fec386e8 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1572.330846] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b0e03ac-c021-4aab-a239-76e96436ba5a tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "interface-0029d975-bd48-4558-9f41-a0cf91336393-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.971s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1572.334349] env[62510]: DEBUG oslo_concurrency.lockutils [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.601s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1572.334827] env[62510]: DEBUG nova.compute.manager [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1572.337386] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.083s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1572.338777] env[62510]: INFO nova.compute.claims [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1572.639936] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Acquiring lock "refresh_cache-e3850272-9dae-4164-8f0e-f5513af23f49" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1572.690974] env[62510]: DEBUG oslo_vmware.api [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Task: {'id': task-1768669, 'name': ReconfigVM_Task, 'duration_secs': 0.132158} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1572.691304] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367276', 'volume_id': 'b09085d6-32c6-49d0-a8fe-b4eb76d80b62', 'name': 'volume-b09085d6-32c6-49d0-a8fe-b4eb76d80b62', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '568ce58c-9ce5-4b40-988f-f31d8e0c376d', 'attached_at': '', 'detached_at': '', 'volume_id': 'b09085d6-32c6-49d0-a8fe-b4eb76d80b62', 'serial': 'b09085d6-32c6-49d0-a8fe-b4eb76d80b62'} {{(pid=62510) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1572.691868] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ca1c250e-f152-435a-8804-20d117dd678c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.699336] env[62510]: DEBUG oslo_vmware.api [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Waiting for the task: (returnval){ [ 1572.699336] env[62510]: value = "task-1768670" [ 1572.699336] env[62510]: _type = "Task" [ 1572.699336] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.708089] env[62510]: DEBUG oslo_vmware.api [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Task: {'id': task-1768670, 'name': Rename_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.746589] env[62510]: DEBUG nova.network.neutron [req-710f60b1-e278-4591-8ae2-9e3dc2f947bf req-1ba6438e-b453-4e5c-9bf5-c70dbef33138 service nova] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1572.829644] env[62510]: DEBUG nova.network.neutron [req-710f60b1-e278-4591-8ae2-9e3dc2f947bf req-1ba6438e-b453-4e5c-9bf5-c70dbef33138 service nova] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1572.843043] env[62510]: DEBUG nova.compute.utils [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1572.846571] env[62510]: DEBUG nova.compute.manager [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1572.846752] env[62510]: DEBUG nova.network.neutron [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1572.892151] env[62510]: DEBUG nova.policy [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'da50a3ca3cd14a109573a5f5da2ceef0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1c995c2427bd4f7da644d0a8df7d69da', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1573.146514] env[62510]: DEBUG nova.network.neutron [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Successfully created port: 766401c7-3f55-48f7-a695-d2db7a829ade {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1573.210780] env[62510]: DEBUG oslo_vmware.api [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Task: {'id': task-1768670, 'name': Rename_Task, 'duration_secs': 0.15581} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.211203] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1573.211351] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-90e7e4d4-0558-4947-a582-234d424f7f50 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.220052] env[62510]: DEBUG oslo_vmware.api [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Waiting for the task: (returnval){ [ 1573.220052] env[62510]: value = "task-1768671" [ 1573.220052] env[62510]: _type = "Task" [ 1573.220052] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.228691] env[62510]: DEBUG oslo_vmware.api [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Task: {'id': task-1768671, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.332449] env[62510]: DEBUG oslo_concurrency.lockutils [req-710f60b1-e278-4591-8ae2-9e3dc2f947bf req-1ba6438e-b453-4e5c-9bf5-c70dbef33138 service nova] Releasing lock "refresh_cache-e3850272-9dae-4164-8f0e-f5513af23f49" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1573.332843] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Acquired lock "refresh_cache-e3850272-9dae-4164-8f0e-f5513af23f49" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1573.333024] env[62510]: DEBUG nova.network.neutron [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1573.348047] env[62510]: DEBUG nova.compute.manager [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1573.732660] env[62510]: DEBUG oslo_vmware.api [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Task: {'id': task-1768671, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.811720] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14703735-66cb-4bb2-9daf-a803ca38affb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.820121] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0b5144e-9259-487c-97ab-7033605391de {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.862012] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54881879-5914-4cd6-a37f-1e94838fc6ef {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.865018] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4239e95f-13fb-4ef3-a27e-023d5e043d61 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "interface-0029d975-bd48-4558-9f41-a0cf91336393-37fc170f-1d32-4c6f-b871-74d459e02d76" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1573.865273] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4239e95f-13fb-4ef3-a27e-023d5e043d61 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "interface-0029d975-bd48-4558-9f41-a0cf91336393-37fc170f-1d32-4c6f-b871-74d459e02d76" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1573.874023] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f38de4f-03e2-427b-ac5a-aa2000d3bf4f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.889831] env[62510]: DEBUG nova.compute.provider_tree [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1573.893091] env[62510]: DEBUG nova.network.neutron [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1574.234436] env[62510]: DEBUG oslo_vmware.api [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Task: {'id': task-1768671, 'name': PowerOnVM_Task, 'duration_secs': 0.609357} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.234814] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1574.235113] env[62510]: INFO nova.compute.manager [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Took 5.40 seconds to spawn the instance on the hypervisor. [ 1574.235502] env[62510]: DEBUG nova.compute.manager [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1574.236525] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68fcb16f-0499-4aba-b495-616d16075cc0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.333141] env[62510]: DEBUG nova.network.neutron [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Updating instance_info_cache with network_info: [{"id": "d769f744-6168-49b4-b195-b608fec386e8", "address": "fa:16:3e:4e:6e:a6", "network": {"id": "f8a73c35-302b-46f2-9c55-63efc73eee95", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1580802535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ca23491c4194bee84d0e9be0b015342", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd769f744-61", "ovs_interfaceid": "d769f744-6168-49b4-b195-b608fec386e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1574.362818] env[62510]: DEBUG nova.compute.manager [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1574.368062] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4239e95f-13fb-4ef3-a27e-023d5e043d61 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "0029d975-bd48-4558-9f41-a0cf91336393" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1574.368244] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4239e95f-13fb-4ef3-a27e-023d5e043d61 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquired lock "0029d975-bd48-4558-9f41-a0cf91336393" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1574.369479] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67b3abd6-c8ef-422d-84c5-c8d9312957a1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.389478] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd91d8e7-da75-4433-8a87-7e0a0a2abe7c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.393417] env[62510]: DEBUG nova.scheduler.client.report [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1574.415735] env[62510]: DEBUG nova.virt.hardware [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1574.415974] env[62510]: DEBUG nova.virt.hardware [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 
1574.416149] env[62510]: DEBUG nova.virt.hardware [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1574.416361] env[62510]: DEBUG nova.virt.hardware [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1574.416531] env[62510]: DEBUG nova.virt.hardware [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1574.416686] env[62510]: DEBUG nova.virt.hardware [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1574.418450] env[62510]: DEBUG nova.virt.hardware [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1574.418450] env[62510]: DEBUG nova.virt.hardware [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1574.418450] env[62510]: DEBUG nova.virt.hardware [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1574.418450] env[62510]: DEBUG nova.virt.hardware [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1574.418450] env[62510]: DEBUG nova.virt.hardware [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1574.423688] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4239e95f-13fb-4ef3-a27e-023d5e043d61 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Reconfiguring VM to detach interface {{(pid=62510) detach_interface 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1574.424455] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b6a8b0d-00ce-4aab-b828-318441be4bd8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.428372] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.091s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1574.428855] env[62510]: DEBUG nova.compute.manager [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1574.431683] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0d14f2e6-8270-4f93-b014-a0d7b8168b73 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.444555] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.941s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1574.446285] env[62510]: INFO nova.compute.claims [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1574.455863] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6312106-0ac1-429c-af0a-ce97d32c1644 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.462054] env[62510]: DEBUG oslo_vmware.api [None req-4239e95f-13fb-4ef3-a27e-023d5e043d61 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1574.462054] env[62510]: value = "task-1768672" [ 1574.462054] env[62510]: _type = "Task" [ 1574.462054] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.481392] env[62510]: DEBUG oslo_vmware.api [None req-4239e95f-13fb-4ef3-a27e-023d5e043d61 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768672, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.629795] env[62510]: DEBUG nova.compute.manager [req-e2b45b65-e03d-4e5b-9f21-e6729eb183f0 req-8e74920e-175f-4d23-8057-64b12676a898 service nova] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Received event network-vif-plugged-766401c7-3f55-48f7-a695-d2db7a829ade {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1574.630062] env[62510]: DEBUG oslo_concurrency.lockutils [req-e2b45b65-e03d-4e5b-9f21-e6729eb183f0 req-8e74920e-175f-4d23-8057-64b12676a898 service nova] Acquiring lock "bd21dd81-c0d9-4ff1-9183-0b4622dc5afb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1574.630392] env[62510]: DEBUG oslo_concurrency.lockutils [req-e2b45b65-e03d-4e5b-9f21-e6729eb183f0 req-8e74920e-175f-4d23-8057-64b12676a898 service nova] Lock "bd21dd81-c0d9-4ff1-9183-0b4622dc5afb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1574.630687] env[62510]: DEBUG oslo_concurrency.lockutils [req-e2b45b65-e03d-4e5b-9f21-e6729eb183f0 req-8e74920e-175f-4d23-8057-64b12676a898 service nova] Lock "bd21dd81-c0d9-4ff1-9183-0b4622dc5afb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1574.631279] env[62510]: DEBUG nova.compute.manager [req-e2b45b65-e03d-4e5b-9f21-e6729eb183f0 req-8e74920e-175f-4d23-8057-64b12676a898 service nova] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] No waiting events found dispatching network-vif-plugged-766401c7-3f55-48f7-a695-d2db7a829ade {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1574.631279] env[62510]: WARNING nova.compute.manager [req-e2b45b65-e03d-4e5b-9f21-e6729eb183f0 req-8e74920e-175f-4d23-8057-64b12676a898 service nova] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Received unexpected event network-vif-plugged-766401c7-3f55-48f7-a695-d2db7a829ade for instance with vm_state building and task_state spawning. [ 1574.684131] env[62510]: DEBUG nova.network.neutron [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Successfully updated port: 766401c7-3f55-48f7-a695-d2db7a829ade {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1574.766343] env[62510]: INFO nova.compute.manager [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Took 37.29 seconds to build instance. 
[ 1574.835723] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Releasing lock "refresh_cache-e3850272-9dae-4164-8f0e-f5513af23f49" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1574.836074] env[62510]: DEBUG nova.compute.manager [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Instance network_info: |[{"id": "d769f744-6168-49b4-b195-b608fec386e8", "address": "fa:16:3e:4e:6e:a6", "network": {"id": "f8a73c35-302b-46f2-9c55-63efc73eee95", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1580802535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ca23491c4194bee84d0e9be0b015342", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd769f744-61", "ovs_interfaceid": "d769f744-6168-49b4-b195-b608fec386e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1574.836585] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4e:6e:a6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78e1ebb0-0130-446b-bf73-a0e59bbb95cc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd769f744-6168-49b4-b195-b608fec386e8', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1574.844275] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Creating folder: Project (5ca23491c4194bee84d0e9be0b015342). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1574.844550] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4c752882-8745-493f-9861-51c98699bf1f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.860226] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Created folder: Project (5ca23491c4194bee84d0e9be0b015342) in parent group-v367197. 
[ 1574.861898] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Creating folder: Instances. Parent ref: group-v367325. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1574.861898] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8284bb47-50ba-41c5-bee5-c1bda74da279 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.871101] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Created folder: Instances in parent group-v367325. [ 1574.871360] env[62510]: DEBUG oslo.service.loopingcall [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1574.871592] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1574.871812] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1ebc4f9d-65c9-4d8b-a1cf-3acb7c7de608 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.893725] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1574.893725] env[62510]: value = "task-1768675" [ 1574.893725] env[62510]: _type = "Task" [ 1574.893725] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.906693] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768675, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.950938] env[62510]: DEBUG nova.compute.utils [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1574.954478] env[62510]: DEBUG nova.compute.manager [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1574.954770] env[62510]: DEBUG nova.network.neutron [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1574.974977] env[62510]: DEBUG oslo_vmware.api [None req-4239e95f-13fb-4ef3-a27e-023d5e043d61 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768672, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.022318] env[62510]: DEBUG nova.policy [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a406a2bf0ccd4b99ba7dcb359a9b640e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e144c0bd2d124193a65ad53de8c43039', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1575.186423] env[62510]: DEBUG oslo_concurrency.lockutils [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Acquiring lock "refresh_cache-bd21dd81-c0d9-4ff1-9183-0b4622dc5afb" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1575.186620] env[62510]: DEBUG oslo_concurrency.lockutils [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Acquired lock "refresh_cache-bd21dd81-c0d9-4ff1-9183-0b4622dc5afb" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1575.186836] env[62510]: DEBUG nova.network.neutron [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1575.269069] env[62510]: DEBUG oslo_concurrency.lockutils [None req-453cd80f-8640-490d-9524-124f1b0bce3f tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Lock "568ce58c-9ce5-4b40-988f-f31d8e0c376d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.825s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1575.411272] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768675, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.418830] env[62510]: DEBUG nova.network.neutron [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Successfully created port: 8597503b-d757-44ff-91a7-6f52b3b75aa3 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1575.464031] env[62510]: DEBUG nova.compute.manager [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1575.475391] env[62510]: DEBUG oslo_vmware.api [None req-4239e95f-13fb-4ef3-a27e-023d5e043d61 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768672, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.773387] env[62510]: DEBUG nova.compute.manager [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1575.783063] env[62510]: DEBUG nova.network.neutron [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1575.908549] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768675, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.982717] env[62510]: DEBUG oslo_vmware.api [None req-4239e95f-13fb-4ef3-a27e-023d5e043d61 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768672, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.063855] env[62510]: DEBUG nova.network.neutron [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Updating instance_info_cache with network_info: [{"id": "766401c7-3f55-48f7-a695-d2db7a829ade", "address": "fa:16:3e:c4:51:1d", "network": {"id": "457a7f06-5f1c-485e-8589-43d0e40d3fc5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2119303260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c995c2427bd4f7da644d0a8df7d69da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2153f70-3d14-42ab-8bb3-be78296dd3b8", "external-id": "nsx-vlan-transportzone-532", "segmentation_id": 532, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap766401c7-3f", "ovs_interfaceid": "766401c7-3f55-48f7-a695-d2db7a829ade", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1576.081537] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19b8bd46-1a91-4038-b98f-11c1dfc9567f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.090993] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a74bdee3-30aa-4bd8-bfe4-be4f387ac5e0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.122469] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6551d707-c824-4887-a74e-fe581804a448 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.131140] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4bea027-888d-4904-8f10-3891326807e8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.146821] env[62510]: DEBUG nova.compute.provider_tree [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1576.299879] env[62510]: DEBUG oslo_concurrency.lockutils [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1576.407413] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768675, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.476769] env[62510]: DEBUG nova.compute.manager [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1576.479187] env[62510]: DEBUG oslo_vmware.api [None req-4239e95f-13fb-4ef3-a27e-023d5e043d61 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768672, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.507046] env[62510]: DEBUG nova.virt.hardware [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1576.507359] env[62510]: DEBUG nova.virt.hardware [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1576.507522] env[62510]: DEBUG nova.virt.hardware [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1576.507706] env[62510]: DEBUG nova.virt.hardware [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1576.507865] env[62510]: DEBUG nova.virt.hardware [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1576.508035] env[62510]: DEBUG nova.virt.hardware [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 
tempest-ServerDiskConfigTestJSON-921990528-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1576.508289] env[62510]: DEBUG nova.virt.hardware [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1576.508551] env[62510]: DEBUG nova.virt.hardware [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1576.508781] env[62510]: DEBUG nova.virt.hardware [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1576.508994] env[62510]: DEBUG nova.virt.hardware [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1576.509223] env[62510]: DEBUG nova.virt.hardware [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1576.510178] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9280aae-684d-4910-bea7-d472b0f71562 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.519278] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3b85d7e-a9b5-424d-84d0-a79fc3dc8f8d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.566366] env[62510]: DEBUG oslo_concurrency.lockutils [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Releasing lock "refresh_cache-bd21dd81-c0d9-4ff1-9183-0b4622dc5afb" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1576.566735] env[62510]: DEBUG nova.compute.manager [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Instance network_info: |[{"id": "766401c7-3f55-48f7-a695-d2db7a829ade", "address": "fa:16:3e:c4:51:1d", "network": {"id": "457a7f06-5f1c-485e-8589-43d0e40d3fc5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2119303260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": 
"192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c995c2427bd4f7da644d0a8df7d69da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2153f70-3d14-42ab-8bb3-be78296dd3b8", "external-id": "nsx-vlan-transportzone-532", "segmentation_id": 532, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap766401c7-3f", "ovs_interfaceid": "766401c7-3f55-48f7-a695-d2db7a829ade", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1576.567221] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c4:51:1d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e2153f70-3d14-42ab-8bb3-be78296dd3b8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '766401c7-3f55-48f7-a695-d2db7a829ade', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1576.575820] env[62510]: DEBUG oslo.service.loopingcall [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1576.576075] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1576.576383] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e98a7e36-4611-41ce-bc93-912a62b2a5ae {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.598388] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1576.598388] env[62510]: value = "task-1768676" [ 1576.598388] env[62510]: _type = "Task" [ 1576.598388] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.608437] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768676, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.650134] env[62510]: DEBUG nova.scheduler.client.report [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1576.663413] env[62510]: DEBUG nova.compute.manager [req-039b241a-286a-4213-b0df-bc45fd32d33f req-0388122c-b7c1-408b-8a7a-d25930d8a6d5 service nova] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Received event network-changed-766401c7-3f55-48f7-a695-d2db7a829ade {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1576.663743] env[62510]: DEBUG nova.compute.manager [req-039b241a-286a-4213-b0df-bc45fd32d33f req-0388122c-b7c1-408b-8a7a-d25930d8a6d5 service nova] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Refreshing instance network info cache due to event network-changed-766401c7-3f55-48f7-a695-d2db7a829ade. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1576.663956] env[62510]: DEBUG oslo_concurrency.lockutils [req-039b241a-286a-4213-b0df-bc45fd32d33f req-0388122c-b7c1-408b-8a7a-d25930d8a6d5 service nova] Acquiring lock "refresh_cache-bd21dd81-c0d9-4ff1-9183-0b4622dc5afb" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1576.663956] env[62510]: DEBUG oslo_concurrency.lockutils [req-039b241a-286a-4213-b0df-bc45fd32d33f req-0388122c-b7c1-408b-8a7a-d25930d8a6d5 service nova] Acquired lock "refresh_cache-bd21dd81-c0d9-4ff1-9183-0b4622dc5afb" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1576.664151] env[62510]: DEBUG nova.network.neutron [req-039b241a-286a-4213-b0df-bc45fd32d33f req-0388122c-b7c1-408b-8a7a-d25930d8a6d5 service nova] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Refreshing network info cache for port 766401c7-3f55-48f7-a695-d2db7a829ade {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1576.911061] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768675, 'name': CreateVM_Task, 'duration_secs': 1.713537} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1576.911382] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1576.912327] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1576.912565] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1576.913036] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1576.913439] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b923df5-fbf8-4a78-9489-1541b783dd8b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.920209] env[62510]: DEBUG oslo_vmware.api [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Waiting for the task: (returnval){ [ 1576.920209] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]521e1f38-acb5-bad5-109a-3f4c4620f66e" [ 1576.920209] env[62510]: _type = "Task" [ 1576.920209] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.933907] env[62510]: DEBUG oslo_vmware.api [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]521e1f38-acb5-bad5-109a-3f4c4620f66e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.975665] env[62510]: DEBUG oslo_vmware.api [None req-4239e95f-13fb-4ef3-a27e-023d5e043d61 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768672, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.030359] env[62510]: DEBUG nova.network.neutron [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Successfully updated port: 8597503b-d757-44ff-91a7-6f52b3b75aa3 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1577.109107] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768676, 'name': CreateVM_Task, 'duration_secs': 0.404402} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.109294] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1577.109940] env[62510]: DEBUG oslo_concurrency.lockutils [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1577.156108] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.711s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1577.156688] env[62510]: DEBUG nova.compute.manager [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1577.159975] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 26.158s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1577.433080] env[62510]: DEBUG oslo_vmware.api [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]521e1f38-acb5-bad5-109a-3f4c4620f66e, 'name': SearchDatastore_Task, 'duration_secs': 0.013896} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.433080] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1577.433308] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1577.433495] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1577.433645] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1577.433825] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1577.434123] env[62510]: DEBUG oslo_concurrency.lockutils [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1577.434450] env[62510]: DEBUG oslo_concurrency.lockutils [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1577.434704] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-529861b8-a98d-422c-9f4d-575069a21099 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.436682] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2550e9b4-ab32-4420-a31e-2b5ee00ffd7a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.443092] 
env[62510]: DEBUG oslo_vmware.api [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Waiting for the task: (returnval){ [ 1577.443092] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5201396f-f572-6cc6-3a58-9ff118ed1e8e" [ 1577.443092] env[62510]: _type = "Task" [ 1577.443092] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.448208] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1577.448420] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1577.449440] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-917a46a9-2e4e-4a46-a556-6309f1e7038e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.454744] env[62510]: DEBUG oslo_vmware.api [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5201396f-f572-6cc6-3a58-9ff118ed1e8e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.458221] env[62510]: DEBUG oslo_vmware.api [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Waiting for the task: (returnval){ [ 1577.458221] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]525f161e-beb5-b225-63cb-c08fc92d73b1" [ 1577.458221] env[62510]: _type = "Task" [ 1577.458221] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.470025] env[62510]: DEBUG oslo_vmware.api [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]525f161e-beb5-b225-63cb-c08fc92d73b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.476362] env[62510]: DEBUG oslo_vmware.api [None req-4239e95f-13fb-4ef3-a27e-023d5e043d61 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768672, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.533028] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "refresh_cache-350d5f83-d9ce-4997-bf57-70c4a4e22ba0" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1577.533278] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquired lock "refresh_cache-350d5f83-d9ce-4997-bf57-70c4a4e22ba0" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1577.533532] env[62510]: DEBUG nova.network.neutron [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1577.574660] env[62510]: DEBUG nova.network.neutron [req-039b241a-286a-4213-b0df-bc45fd32d33f req-0388122c-b7c1-408b-8a7a-d25930d8a6d5 service nova] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Updated VIF entry in instance network info cache for port 766401c7-3f55-48f7-a695-d2db7a829ade. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1577.575012] env[62510]: DEBUG nova.network.neutron [req-039b241a-286a-4213-b0df-bc45fd32d33f req-0388122c-b7c1-408b-8a7a-d25930d8a6d5 service nova] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Updating instance_info_cache with network_info: [{"id": "766401c7-3f55-48f7-a695-d2db7a829ade", "address": "fa:16:3e:c4:51:1d", "network": {"id": "457a7f06-5f1c-485e-8589-43d0e40d3fc5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2119303260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c995c2427bd4f7da644d0a8df7d69da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2153f70-3d14-42ab-8bb3-be78296dd3b8", "external-id": "nsx-vlan-transportzone-532", "segmentation_id": 532, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap766401c7-3f", "ovs_interfaceid": "766401c7-3f55-48f7-a695-d2db7a829ade", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1577.662858] env[62510]: DEBUG nova.compute.utils [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1577.674826] env[62510]: DEBUG nova.compute.manager [None 
req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1577.675048] env[62510]: DEBUG nova.network.neutron [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1577.728451] env[62510]: DEBUG nova.policy [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '29251dc4243a44669bae0609008a88e9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3f85ce3c02964d36a77221ba8235978c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1577.955182] env[62510]: DEBUG oslo_vmware.api [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5201396f-f572-6cc6-3a58-9ff118ed1e8e, 'name': SearchDatastore_Task, 'duration_secs': 0.019476} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.955524] env[62510]: DEBUG oslo_concurrency.lockutils [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1577.955799] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1577.956028] env[62510]: DEBUG oslo_concurrency.lockutils [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1577.969048] env[62510]: DEBUG oslo_vmware.api [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]525f161e-beb5-b225-63cb-c08fc92d73b1, 'name': SearchDatastore_Task, 'duration_secs': 0.012363} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.969913] env[62510]: DEBUG nova.network.neutron [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Successfully created port: 13fb40b1-132b-407d-b6e0-eec141ae88a8 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1577.975262] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc70f4cf-618e-4f1c-a25a-4ad889868efb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.982090] env[62510]: DEBUG oslo_vmware.api [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Waiting for the task: (returnval){ [ 1577.982090] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]523355d1-1c2f-df04-fa2a-9c0b0a790db1" [ 1577.982090] env[62510]: _type = "Task" [ 1577.982090] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.985514] env[62510]: DEBUG oslo_vmware.api [None req-4239e95f-13fb-4ef3-a27e-023d5e043d61 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768672, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.993591] env[62510]: DEBUG oslo_vmware.api [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]523355d1-1c2f-df04-fa2a-9c0b0a790db1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.064895] env[62510]: DEBUG nova.network.neutron [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1578.077581] env[62510]: DEBUG oslo_concurrency.lockutils [req-039b241a-286a-4213-b0df-bc45fd32d33f req-0388122c-b7c1-408b-8a7a-d25930d8a6d5 service nova] Releasing lock "refresh_cache-bd21dd81-c0d9-4ff1-9183-0b4622dc5afb" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1578.077858] env[62510]: DEBUG nova.compute.manager [req-039b241a-286a-4213-b0df-bc45fd32d33f req-0388122c-b7c1-408b-8a7a-d25930d8a6d5 service nova] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Received event network-changed-e1df700d-6a97-4814-9a7d-e381d485b8b4 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1578.078044] env[62510]: DEBUG nova.compute.manager [req-039b241a-286a-4213-b0df-bc45fd32d33f req-0388122c-b7c1-408b-8a7a-d25930d8a6d5 service nova] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Refreshing instance network info cache due to event network-changed-e1df700d-6a97-4814-9a7d-e381d485b8b4. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1578.078259] env[62510]: DEBUG oslo_concurrency.lockutils [req-039b241a-286a-4213-b0df-bc45fd32d33f req-0388122c-b7c1-408b-8a7a-d25930d8a6d5 service nova] Acquiring lock "refresh_cache-568ce58c-9ce5-4b40-988f-f31d8e0c376d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1578.078411] env[62510]: DEBUG oslo_concurrency.lockutils [req-039b241a-286a-4213-b0df-bc45fd32d33f req-0388122c-b7c1-408b-8a7a-d25930d8a6d5 service nova] Acquired lock "refresh_cache-568ce58c-9ce5-4b40-988f-f31d8e0c376d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1578.078608] env[62510]: DEBUG nova.network.neutron [req-039b241a-286a-4213-b0df-bc45fd32d33f req-0388122c-b7c1-408b-8a7a-d25930d8a6d5 service nova] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Refreshing network info cache for port e1df700d-6a97-4814-9a7d-e381d485b8b4 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1578.175855] env[62510]: DEBUG nova.compute.manager [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Start building block device mappings for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1578.181745] env[62510]: INFO nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Updating resource usage from migration 8d1549df-6eeb-4b96-9648-01fd9071d48d [ 1578.315761] env[62510]: DEBUG nova.network.neutron [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Updating instance_info_cache with network_info: [{"id": "8597503b-d757-44ff-91a7-6f52b3b75aa3", "address": "fa:16:3e:bd:11:3e", "network": {"id": "bf59f5d9-5154-4120-9edd-03529b552382", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2003015829-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e144c0bd2d124193a65ad53de8c43039", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8597503b-d7", "ovs_interfaceid": "8597503b-d757-44ff-91a7-6f52b3b75aa3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1578.381329] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 612e95d6-28ef-4c9a-b5d9-fd83122bfa44 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1578.381603] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 8ffa27e9-6a3b-48d1-aed4-c808089788d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1578.381852] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 4e735bb6-f167-4c2b-b44e-d2dd3040603d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1578.383251] env[62510]: WARNING nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 34a464e2-d38e-4c24-a487-c62a4f484667 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
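The resource-tracker entries around this point show the periodic _update_available_resource pass walking placement allocations for this compute node: instances it actively manages keep their allocations, while allocations that reference this host for instances it does not manage are only warned about and skipped ("Skipping heal of allocation because we do not know what to do."). A minimal, hypothetical sketch of that keep-or-skip decision follows; the data shapes and helper name are illustrative only, not Nova's real resource-tracker objects.

# Hypothetical reconciliation step mirroring the DEBUG/WARNING lines above:
# tracked instances keep their allocations, untracked ones are skipped.
def reconcile_allocations(tracked_instances, placement_allocations):
    """tracked_instances: set of instance UUIDs this host actively manages.
    placement_allocations: dict of instance UUID -> resource dict, e.g.
    {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}."""
    report = {}
    for uuid, resources in placement_allocations.items():
        if uuid in tracked_instances:
            # "actively managed on this compute host and has allocations in placement"
            report[uuid] = ('keep', resources)
        else:
            # "not being actively managed ... Skipping heal of allocation"
            report[uuid] = ('skip-heal', resources)
    return report

if __name__ == '__main__':
    tracked = {'8bbafd7f-cdd1-4246-a509-2f97a6f78497'}
    allocations = {
        '8bbafd7f-cdd1-4246-a509-2f97a6f78497': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1},
        '34a464e2-d38e-4c24-a487-c62a4f484667': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1},
    }
    for uuid, (action, res) in reconcile_allocations(tracked, allocations).items():
        print(uuid, action, res)

The UUIDs above are taken from the log entries themselves; everything else in the sketch is assumed for illustration.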
[ 1578.384080] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 8bbafd7f-cdd1-4246-a509-2f97a6f78497 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1578.384080] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 12768001-6ed0-47be-8f20-c59ee82b842a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1578.384080] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1578.384080] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 7cc6d4a6-2765-44e7-b378-e213a562593d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1578.384080] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance b7c2c768-573b-4c1c-ade7-45fb87b95d41 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1578.384080] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 0029d975-bd48-4558-9f41-a0cf91336393 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1578.384080] env[62510]: WARNING nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1578.384626] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance b004fba7-13e0-40f0-827d-8d09b7717176 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1578.384626] env[62510]: WARNING nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 58e71d67-aed2-4329-ab60-4dfacff1d0a2 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1578.384626] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 3533a113-6f46-4b18-872d-9bc1b0481969 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1578.384626] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 83fa0d32-18ee-401d-af0b-a0adb538e5f4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1578.384626] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 2c5c38c1-511f-4aae-969a-eb6de128fae7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1578.384996] env[62510]: WARNING nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 2c5d137d-4fd5-4035-a04f-bdb76e90edd7 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1578.384996] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 9d5d29ea-be92-4881-9fc8-fea3f2f442d0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1578.384996] env[62510]: WARNING nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance f9eb5110-28ec-474e-b80e-0bfcee51483d is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1578.385139] env[62510]: WARNING nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 87d1d75e-41c4-42e6-bf58-deabb71400e1 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
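The recurring CreateVM_Task, ReconfigVM_Task, CopyVirtualDisk_Task and SearchDatastore_Task lines before and after this point come from oslo.vmware's task-polling loop, which re-reads task progress until the task reaches a terminal state and then reports a duration_secs. Below is a simplified, hypothetical polling loop in that spirit; fetch_task_info is a stand-in callable, not the real oslo.vmware API.

import time

def wait_for_task(fetch_task_info, task_id, interval=0.5, timeout=300):
    """Simplified stand-in for the polling behaviour seen in the log
    ("progress is 0%", "progress is 99%", then "completed successfully").
    fetch_task_info(task_id) is assumed to return a dict like
    {'state': 'running'|'success'|'error', 'progress': int, 'error': str or None}."""
    start = time.monotonic()
    while True:
        info = fetch_task_info(task_id)
        print("Task: {'id': %s} progress is %d%%." % (task_id, info.get('progress', 0)))
        if info['state'] == 'success':
            # Matches the "completed successfully" entries with a duration.
            return {'duration_secs': round(time.monotonic() - start, 6)}
        if info['state'] == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        if time.monotonic() - start > timeout:
            raise TimeoutError('task %s did not complete within %ss' % (task_id, timeout))
        time.sleep(interval)

The ReconfigVM_Task entries stuck at 14% across many poll cycles illustrate why such a loop needs both a poll interval and an overall timeout.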
[ 1578.385294] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1578.385809] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 568ce58c-9ce5-4b40-988f-f31d8e0c376d actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1578.385809] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance e3850272-9dae-4164-8f0e-f5513af23f49 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1578.385809] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance bd21dd81-c0d9-4ff1-9183-0b4622dc5afb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1578.385809] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 350d5f83-d9ce-4997-bf57-70c4a4e22ba0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1578.385809] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance e7daad63-c802-4a86-bead-7e849064ed61 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1578.479996] env[62510]: DEBUG oslo_vmware.api [None req-4239e95f-13fb-4ef3-a27e-023d5e043d61 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768672, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.496093] env[62510]: DEBUG oslo_vmware.api [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]523355d1-1c2f-df04-fa2a-9c0b0a790db1, 'name': SearchDatastore_Task, 'duration_secs': 0.037677} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1578.496093] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1578.496093] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] e3850272-9dae-4164-8f0e-f5513af23f49/e3850272-9dae-4164-8f0e-f5513af23f49.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1578.496093] env[62510]: DEBUG oslo_concurrency.lockutils [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1578.496424] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1578.496424] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5a1aa352-8c22-440f-af83-cf2d3063c2a7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.498556] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c08e9f6-a171-418a-8146-42e70dbc76d6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.505927] env[62510]: DEBUG oslo_vmware.api [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Waiting for the task: (returnval){ [ 1578.505927] env[62510]: value = "task-1768677" [ 1578.505927] env[62510]: _type = "Task" [ 1578.505927] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1578.510175] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1578.510333] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1578.511413] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2eefe053-7712-4767-8e6b-f94d84e59f62 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.516782] env[62510]: DEBUG oslo_vmware.api [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': task-1768677, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.520059] env[62510]: DEBUG oslo_vmware.api [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Waiting for the task: (returnval){ [ 1578.520059] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52c8855b-9153-d6b3-3565-20bc26d60619" [ 1578.520059] env[62510]: _type = "Task" [ 1578.520059] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1578.542522] env[62510]: DEBUG oslo_vmware.api [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52c8855b-9153-d6b3-3565-20bc26d60619, 'name': SearchDatastore_Task} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1578.543283] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08bc2126-611e-4de2-8687-690dc136ad4e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.549102] env[62510]: DEBUG oslo_vmware.api [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Waiting for the task: (returnval){ [ 1578.549102] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52c6dec0-0a9c-72ce-14f5-9349263567ad" [ 1578.549102] env[62510]: _type = "Task" [ 1578.549102] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1578.556709] env[62510]: DEBUG oslo_vmware.api [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52c6dec0-0a9c-72ce-14f5-9349263567ad, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.702289] env[62510]: DEBUG nova.compute.manager [req-061ec129-9504-4ab2-835c-3f8eb7c0be82 req-1a16ab7e-bd30-49f1-9ced-12ba93740556 service nova] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Received event network-vif-plugged-8597503b-d757-44ff-91a7-6f52b3b75aa3 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1578.702289] env[62510]: DEBUG oslo_concurrency.lockutils [req-061ec129-9504-4ab2-835c-3f8eb7c0be82 req-1a16ab7e-bd30-49f1-9ced-12ba93740556 service nova] Acquiring lock "350d5f83-d9ce-4997-bf57-70c4a4e22ba0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1578.702289] env[62510]: DEBUG oslo_concurrency.lockutils [req-061ec129-9504-4ab2-835c-3f8eb7c0be82 req-1a16ab7e-bd30-49f1-9ced-12ba93740556 service nova] Lock "350d5f83-d9ce-4997-bf57-70c4a4e22ba0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1578.702289] env[62510]: DEBUG oslo_concurrency.lockutils [req-061ec129-9504-4ab2-835c-3f8eb7c0be82 req-1a16ab7e-bd30-49f1-9ced-12ba93740556 service nova] Lock "350d5f83-d9ce-4997-bf57-70c4a4e22ba0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1578.702988] env[62510]: DEBUG nova.compute.manager [req-061ec129-9504-4ab2-835c-3f8eb7c0be82 req-1a16ab7e-bd30-49f1-9ced-12ba93740556 service nova] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] No waiting events found dispatching network-vif-plugged-8597503b-d757-44ff-91a7-6f52b3b75aa3 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1578.703388] env[62510]: WARNING nova.compute.manager [req-061ec129-9504-4ab2-835c-3f8eb7c0be82 req-1a16ab7e-bd30-49f1-9ced-12ba93740556 service nova] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Received unexpected event network-vif-plugged-8597503b-d757-44ff-91a7-6f52b3b75aa3 for instance with vm_state building and task_state spawning. [ 1578.703754] env[62510]: DEBUG nova.compute.manager [req-061ec129-9504-4ab2-835c-3f8eb7c0be82 req-1a16ab7e-bd30-49f1-9ced-12ba93740556 service nova] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Received event network-changed-8597503b-d757-44ff-91a7-6f52b3b75aa3 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1578.704085] env[62510]: DEBUG nova.compute.manager [req-061ec129-9504-4ab2-835c-3f8eb7c0be82 req-1a16ab7e-bd30-49f1-9ced-12ba93740556 service nova] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Refreshing instance network info cache due to event network-changed-8597503b-d757-44ff-91a7-6f52b3b75aa3. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1578.704404] env[62510]: DEBUG oslo_concurrency.lockutils [req-061ec129-9504-4ab2-835c-3f8eb7c0be82 req-1a16ab7e-bd30-49f1-9ced-12ba93740556 service nova] Acquiring lock "refresh_cache-350d5f83-d9ce-4997-bf57-70c4a4e22ba0" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1578.818465] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Releasing lock "refresh_cache-350d5f83-d9ce-4997-bf57-70c4a4e22ba0" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1578.819241] env[62510]: DEBUG nova.compute.manager [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Instance network_info: |[{"id": "8597503b-d757-44ff-91a7-6f52b3b75aa3", "address": "fa:16:3e:bd:11:3e", "network": {"id": "bf59f5d9-5154-4120-9edd-03529b552382", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2003015829-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e144c0bd2d124193a65ad53de8c43039", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8597503b-d7", "ovs_interfaceid": "8597503b-d757-44ff-91a7-6f52b3b75aa3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1578.819734] env[62510]: DEBUG oslo_concurrency.lockutils [req-061ec129-9504-4ab2-835c-3f8eb7c0be82 req-1a16ab7e-bd30-49f1-9ced-12ba93740556 service nova] Acquired lock "refresh_cache-350d5f83-d9ce-4997-bf57-70c4a4e22ba0" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1578.820124] env[62510]: DEBUG nova.network.neutron [req-061ec129-9504-4ab2-835c-3f8eb7c0be82 req-1a16ab7e-bd30-49f1-9ced-12ba93740556 service nova] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Refreshing network info cache for port 8597503b-d757-44ff-91a7-6f52b3b75aa3 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1578.822955] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bd:11:3e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '87bbf4e0-9064-4516-b7e7-44973f817205', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'8597503b-d757-44ff-91a7-6f52b3b75aa3', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1578.834479] env[62510]: DEBUG oslo.service.loopingcall [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1578.835974] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1578.836541] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0b7d9a0f-9682-4c90-99b0-52d005fbda64 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.862723] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1578.862723] env[62510]: value = "task-1768678" [ 1578.862723] env[62510]: _type = "Task" [ 1578.862723] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1578.872522] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768678, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.889971] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1578.934745] env[62510]: DEBUG nova.network.neutron [req-039b241a-286a-4213-b0df-bc45fd32d33f req-0388122c-b7c1-408b-8a7a-d25930d8a6d5 service nova] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Updated VIF entry in instance network info cache for port e1df700d-6a97-4814-9a7d-e381d485b8b4. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1578.935194] env[62510]: DEBUG nova.network.neutron [req-039b241a-286a-4213-b0df-bc45fd32d33f req-0388122c-b7c1-408b-8a7a-d25930d8a6d5 service nova] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Updating instance_info_cache with network_info: [{"id": "e1df700d-6a97-4814-9a7d-e381d485b8b4", "address": "fa:16:3e:10:0d:4e", "network": {"id": "b210d504-9dc4-4f72-a51a-5a6f07287dfd", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-203898856-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.222", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e3cc351804874ef8bbb055920e2e4582", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bfae3ef8-cae7-455d-8632-ba93e1671625", "external-id": "cl2-zone-841", "segmentation_id": 841, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1df700d-6a", "ovs_interfaceid": "e1df700d-6a97-4814-9a7d-e381d485b8b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1578.981998] env[62510]: DEBUG oslo_vmware.api [None req-4239e95f-13fb-4ef3-a27e-023d5e043d61 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768672, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.018831] env[62510]: DEBUG oslo_vmware.api [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': task-1768677, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.482471} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.018831] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] e3850272-9dae-4164-8f0e-f5513af23f49/e3850272-9dae-4164-8f0e-f5513af23f49.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1579.018831] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1579.018831] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-01f7928b-0ff4-4128-861a-58ccc8d7d536 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.025827] env[62510]: DEBUG oslo_vmware.api [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Waiting for the task: (returnval){ [ 1579.025827] env[62510]: value = "task-1768679" [ 1579.025827] env[62510]: _type = "Task" [ 1579.025827] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.035107] env[62510]: DEBUG oslo_vmware.api [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': task-1768679, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.061576] env[62510]: DEBUG oslo_vmware.api [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52c6dec0-0a9c-72ce-14f5-9349263567ad, 'name': SearchDatastore_Task, 'duration_secs': 0.00876} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.061866] env[62510]: DEBUG oslo_concurrency.lockutils [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1579.063028] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] bd21dd81-c0d9-4ff1-9183-0b4622dc5afb/bd21dd81-c0d9-4ff1-9183-0b4622dc5afb.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1579.063028] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8943d5fc-8d74-472c-9409-dc377740529d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.071338] env[62510]: DEBUG oslo_vmware.api [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Waiting for the task: (returnval){ [ 1579.071338] env[62510]: value = "task-1768680" [ 1579.071338] env[62510]: _type = "Task" [ 1579.071338] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.082648] env[62510]: DEBUG oslo_vmware.api [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': task-1768680, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.191653] env[62510]: DEBUG nova.compute.manager [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1579.223649] env[62510]: DEBUG nova.virt.hardware [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1579.223984] env[62510]: DEBUG nova.virt.hardware [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1579.224112] env[62510]: DEBUG nova.virt.hardware [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1579.224392] env[62510]: DEBUG nova.virt.hardware [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1579.224605] env[62510]: DEBUG nova.virt.hardware [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1579.224665] env[62510]: DEBUG nova.virt.hardware [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1579.224859] env[62510]: DEBUG nova.virt.hardware [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1579.225090] env[62510]: DEBUG nova.virt.hardware [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1579.225281] env[62510]: DEBUG 
nova.virt.hardware [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1579.225498] env[62510]: DEBUG nova.virt.hardware [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1579.225776] env[62510]: DEBUG nova.virt.hardware [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1579.226943] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eacb1bbd-124a-487c-9a35-275c3bfe43a6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.237757] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b43da2a-a4b1-448c-8fbf-e74ba5941572 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.375041] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768678, 'name': CreateVM_Task, 'duration_secs': 0.411614} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.377789] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1579.377789] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1579.377789] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1579.377789] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1579.377789] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a4e438c-c40c-476f-b3cf-12c833f88478 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.387061] env[62510]: DEBUG 
oslo_vmware.api [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1579.387061] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5216338b-29ce-4719-4725-5821f97b5e4a" [ 1579.387061] env[62510]: _type = "Task" [ 1579.387061] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.397257] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance fa43a538-1aae-4642-8370-70f2a49ca92c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1579.399928] env[62510]: DEBUG oslo_vmware.api [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5216338b-29ce-4719-4725-5821f97b5e4a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.438448] env[62510]: DEBUG oslo_concurrency.lockutils [req-039b241a-286a-4213-b0df-bc45fd32d33f req-0388122c-b7c1-408b-8a7a-d25930d8a6d5 service nova] Releasing lock "refresh_cache-568ce58c-9ce5-4b40-988f-f31d8e0c376d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1579.485747] env[62510]: DEBUG oslo_vmware.api [None req-4239e95f-13fb-4ef3-a27e-023d5e043d61 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768672, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.517952] env[62510]: DEBUG nova.network.neutron [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Successfully updated port: 13fb40b1-132b-407d-b6e0-eec141ae88a8 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1579.545531] env[62510]: DEBUG oslo_vmware.api [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': task-1768679, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066606} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.548560] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1579.550735] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d375c93-e4ab-40f5-81e9-8b4ed8547a72 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.576612] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] e3850272-9dae-4164-8f0e-f5513af23f49/e3850272-9dae-4164-8f0e-f5513af23f49.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1579.577259] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ff6126b4-c094-4193-baaa-474b69848168 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.604660] env[62510]: DEBUG oslo_vmware.api [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Waiting for the task: (returnval){ [ 1579.604660] env[62510]: value = "task-1768681" [ 1579.604660] env[62510]: _type = "Task" [ 1579.604660] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.607553] env[62510]: DEBUG oslo_vmware.api [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': task-1768680, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.467248} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.611025] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] bd21dd81-c0d9-4ff1-9183-0b4622dc5afb/bd21dd81-c0d9-4ff1-9183-0b4622dc5afb.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1579.611155] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1579.611402] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-110924da-74c8-4d93-9149-80a9ce144fa9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.619338] env[62510]: DEBUG oslo_vmware.api [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': task-1768681, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.620605] env[62510]: DEBUG oslo_vmware.api [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Waiting for the task: (returnval){ [ 1579.620605] env[62510]: value = "task-1768682" [ 1579.620605] env[62510]: _type = "Task" [ 1579.620605] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.628853] env[62510]: DEBUG oslo_vmware.api [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': task-1768682, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.668851] env[62510]: DEBUG nova.network.neutron [req-061ec129-9504-4ab2-835c-3f8eb7c0be82 req-1a16ab7e-bd30-49f1-9ced-12ba93740556 service nova] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Updated VIF entry in instance network info cache for port 8597503b-d757-44ff-91a7-6f52b3b75aa3. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1579.669346] env[62510]: DEBUG nova.network.neutron [req-061ec129-9504-4ab2-835c-3f8eb7c0be82 req-1a16ab7e-bd30-49f1-9ced-12ba93740556 service nova] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Updating instance_info_cache with network_info: [{"id": "8597503b-d757-44ff-91a7-6f52b3b75aa3", "address": "fa:16:3e:bd:11:3e", "network": {"id": "bf59f5d9-5154-4120-9edd-03529b552382", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2003015829-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e144c0bd2d124193a65ad53de8c43039", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8597503b-d7", "ovs_interfaceid": "8597503b-d757-44ff-91a7-6f52b3b75aa3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1579.896557] env[62510]: DEBUG oslo_vmware.api [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5216338b-29ce-4719-4725-5821f97b5e4a, 'name': SearchDatastore_Task, 'duration_secs': 0.057754} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.896557] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1579.896743] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1579.896969] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1579.897131] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1579.897311] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1579.897590] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-681acd34-631c-4b08-96f3-5d887b63447e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.905236] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 13cdba63-5db4-419f-9e0b-244832d7866b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1579.907392] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1579.907571] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1579.908495] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92953c21-0891-4c48-9112-aa1120e98726 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.914576] env[62510]: DEBUG oslo_vmware.api [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1579.914576] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]520dc18e-32f8-3be2-0aca-47aee5141a6b" [ 1579.914576] env[62510]: _type = "Task" [ 1579.914576] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.923249] env[62510]: DEBUG oslo_vmware.api [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]520dc18e-32f8-3be2-0aca-47aee5141a6b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.982887] env[62510]: DEBUG oslo_vmware.api [None req-4239e95f-13fb-4ef3-a27e-023d5e043d61 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768672, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.022305] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Acquiring lock "refresh_cache-e7daad63-c802-4a86-bead-7e849064ed61" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1580.022388] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Acquired lock "refresh_cache-e7daad63-c802-4a86-bead-7e849064ed61" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1580.022615] env[62510]: DEBUG nova.network.neutron [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1580.118520] env[62510]: DEBUG oslo_vmware.api [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': task-1768681, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.131046] env[62510]: DEBUG oslo_vmware.api [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': task-1768682, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066323} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1580.131046] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1580.131244] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-186acf15-d01c-4ad3-b319-825c3b011853 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.154201] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Reconfiguring VM instance instance-00000031 to attach disk [datastore1] bd21dd81-c0d9-4ff1-9183-0b4622dc5afb/bd21dd81-c0d9-4ff1-9183-0b4622dc5afb.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1580.154569] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1a3c9548-2a5e-4770-8387-bf4e0d4b2917 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.172514] env[62510]: DEBUG oslo_concurrency.lockutils [req-061ec129-9504-4ab2-835c-3f8eb7c0be82 req-1a16ab7e-bd30-49f1-9ced-12ba93740556 service nova] Releasing lock "refresh_cache-350d5f83-d9ce-4997-bf57-70c4a4e22ba0" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1580.176425] env[62510]: DEBUG oslo_vmware.api [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Waiting for the task: (returnval){ [ 1580.176425] env[62510]: value = "task-1768683" [ 1580.176425] env[62510]: _type = "Task" [ 1580.176425] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1580.189973] env[62510]: DEBUG oslo_vmware.api [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': task-1768683, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.410323] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 641628d1-bb6d-4207-89b9-98014328e028 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1580.426153] env[62510]: DEBUG oslo_vmware.api [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]520dc18e-32f8-3be2-0aca-47aee5141a6b, 'name': SearchDatastore_Task, 'duration_secs': 0.009616} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1580.427279] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6315ab8f-d46e-49ab-9187-5830ebaee8f8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.433869] env[62510]: DEBUG oslo_vmware.api [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1580.433869] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52670325-1b61-f946-38bc-f8250308cdd4" [ 1580.433869] env[62510]: _type = "Task" [ 1580.433869] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1580.445079] env[62510]: DEBUG oslo_vmware.api [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52670325-1b61-f946-38bc-f8250308cdd4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.484603] env[62510]: DEBUG oslo_vmware.api [None req-4239e95f-13fb-4ef3-a27e-023d5e043d61 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768672, 'name': ReconfigVM_Task, 'duration_secs': 5.777256} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1580.484852] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4239e95f-13fb-4ef3-a27e-023d5e043d61 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Releasing lock "0029d975-bd48-4558-9f41-a0cf91336393" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1580.485156] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4239e95f-13fb-4ef3-a27e-023d5e043d61 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Reconfigured VM to detach interface {{(pid=62510) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1580.557524] env[62510]: DEBUG nova.network.neutron [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Instance cache missing network info. 
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1580.619257] env[62510]: DEBUG oslo_vmware.api [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': task-1768681, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.687218] env[62510]: DEBUG oslo_vmware.api [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': task-1768683, 'name': ReconfigVM_Task, 'duration_secs': 0.45666} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1580.687528] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Reconfigured VM instance instance-00000031 to attach disk [datastore1] bd21dd81-c0d9-4ff1-9183-0b4622dc5afb/bd21dd81-c0d9-4ff1-9183-0b4622dc5afb.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1580.688506] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6c353dda-cc50-4642-8600-5c51cd9c702d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.695705] env[62510]: DEBUG oslo_vmware.api [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Waiting for the task: (returnval){ [ 1580.695705] env[62510]: value = "task-1768684" [ 1580.695705] env[62510]: _type = "Task" [ 1580.695705] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1580.704535] env[62510]: DEBUG oslo_vmware.api [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': task-1768684, 'name': Rename_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.705488] env[62510]: DEBUG nova.network.neutron [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Updating instance_info_cache with network_info: [{"id": "13fb40b1-132b-407d-b6e0-eec141ae88a8", "address": "fa:16:3e:3b:f9:de", "network": {"id": "259e31c4-74f6-4d58-9f76-c7b34d594473", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1218880601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f85ce3c02964d36a77221ba8235978c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e85cbc56-fee0-41f7-bc70-64f31775ce92", "external-id": "nsx-vlan-transportzone-793", "segmentation_id": 793, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13fb40b1-13", "ovs_interfaceid": "13fb40b1-132b-407d-b6e0-eec141ae88a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1580.734353] env[62510]: DEBUG nova.compute.manager [req-a481e112-c50f-4a21-9e8b-751d9ee62129 req-478f21e6-188f-43a0-baf5-198c96ebb233 service nova] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Received event network-vif-plugged-13fb40b1-132b-407d-b6e0-eec141ae88a8 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1580.734517] env[62510]: DEBUG oslo_concurrency.lockutils [req-a481e112-c50f-4a21-9e8b-751d9ee62129 req-478f21e6-188f-43a0-baf5-198c96ebb233 service nova] Acquiring lock "e7daad63-c802-4a86-bead-7e849064ed61-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1580.734725] env[62510]: DEBUG oslo_concurrency.lockutils [req-a481e112-c50f-4a21-9e8b-751d9ee62129 req-478f21e6-188f-43a0-baf5-198c96ebb233 service nova] Lock "e7daad63-c802-4a86-bead-7e849064ed61-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1580.734871] env[62510]: DEBUG oslo_concurrency.lockutils [req-a481e112-c50f-4a21-9e8b-751d9ee62129 req-478f21e6-188f-43a0-baf5-198c96ebb233 service nova] Lock "e7daad63-c802-4a86-bead-7e849064ed61-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1580.735284] env[62510]: DEBUG nova.compute.manager [req-a481e112-c50f-4a21-9e8b-751d9ee62129 req-478f21e6-188f-43a0-baf5-198c96ebb233 service nova] [instance: e7daad63-c802-4a86-bead-7e849064ed61] No waiting events found dispatching network-vif-plugged-13fb40b1-132b-407d-b6e0-eec141ae88a8 
{{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1580.735516] env[62510]: WARNING nova.compute.manager [req-a481e112-c50f-4a21-9e8b-751d9ee62129 req-478f21e6-188f-43a0-baf5-198c96ebb233 service nova] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Received unexpected event network-vif-plugged-13fb40b1-132b-407d-b6e0-eec141ae88a8 for instance with vm_state building and task_state spawning. [ 1580.735685] env[62510]: DEBUG nova.compute.manager [req-a481e112-c50f-4a21-9e8b-751d9ee62129 req-478f21e6-188f-43a0-baf5-198c96ebb233 service nova] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Received event network-changed-13fb40b1-132b-407d-b6e0-eec141ae88a8 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1580.735863] env[62510]: DEBUG nova.compute.manager [req-a481e112-c50f-4a21-9e8b-751d9ee62129 req-478f21e6-188f-43a0-baf5-198c96ebb233 service nova] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Refreshing instance network info cache due to event network-changed-13fb40b1-132b-407d-b6e0-eec141ae88a8. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1580.736055] env[62510]: DEBUG oslo_concurrency.lockutils [req-a481e112-c50f-4a21-9e8b-751d9ee62129 req-478f21e6-188f-43a0-baf5-198c96ebb233 service nova] Acquiring lock "refresh_cache-e7daad63-c802-4a86-bead-7e849064ed61" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1580.847838] env[62510]: DEBUG nova.compute.manager [req-3353f85c-e919-4c38-83af-f5cd3102d244 req-4432a084-148c-412e-ac09-215762f0161f service nova] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Received event network-vif-deleted-37fc170f-1d32-4c6f-b871-74d459e02d76 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1580.848195] env[62510]: INFO nova.compute.manager [req-3353f85c-e919-4c38-83af-f5cd3102d244 req-4432a084-148c-412e-ac09-215762f0161f service nova] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Neutron deleted interface 37fc170f-1d32-4c6f-b871-74d459e02d76; detaching it from the instance and deleting it from the info cache [ 1580.848492] env[62510]: DEBUG nova.network.neutron [req-3353f85c-e919-4c38-83af-f5cd3102d244 req-4432a084-148c-412e-ac09-215762f0161f service nova] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Updating instance_info_cache with network_info: [{"id": "a003d1ad-b7fa-4edc-a654-9a89e9533cbd", "address": "fa:16:3e:b2:5f:4c", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.139", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa003d1ad-b7", "ovs_interfaceid": "a003d1ad-b7fa-4edc-a654-9a89e9533cbd", "qbh_params": 
null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1580.913553] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance b5ff2a10-3c76-469a-86e0-ed3b135bca37 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1580.945045] env[62510]: DEBUG oslo_vmware.api [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52670325-1b61-f946-38bc-f8250308cdd4, 'name': SearchDatastore_Task, 'duration_secs': 0.013325} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1580.945320] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1580.945597] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 350d5f83-d9ce-4997-bf57-70c4a4e22ba0/350d5f83-d9ce-4997-bf57-70c4a4e22ba0.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1580.945869] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cafe001e-f653-4f32-b911-5b46fb6f4c90 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.954644] env[62510]: DEBUG oslo_vmware.api [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1580.954644] env[62510]: value = "task-1768685" [ 1580.954644] env[62510]: _type = "Task" [ 1580.954644] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1580.963480] env[62510]: DEBUG oslo_vmware.api [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768685, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.120796] env[62510]: DEBUG oslo_vmware.api [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': task-1768681, 'name': ReconfigVM_Task, 'duration_secs': 1.261628} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1581.121101] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Reconfigured VM instance instance-00000030 to attach disk [datastore1] e3850272-9dae-4164-8f0e-f5513af23f49/e3850272-9dae-4164-8f0e-f5513af23f49.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1581.121964] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b628423f-e1c4-44a7-ac92-0ca6003e4659 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.130338] env[62510]: DEBUG oslo_vmware.api [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Waiting for the task: (returnval){ [ 1581.130338] env[62510]: value = "task-1768686" [ 1581.130338] env[62510]: _type = "Task" [ 1581.130338] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.140507] env[62510]: DEBUG oslo_vmware.api [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': task-1768686, 'name': Rename_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.208008] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Releasing lock "refresh_cache-e7daad63-c802-4a86-bead-7e849064ed61" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1581.208340] env[62510]: DEBUG nova.compute.manager [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Instance network_info: |[{"id": "13fb40b1-132b-407d-b6e0-eec141ae88a8", "address": "fa:16:3e:3b:f9:de", "network": {"id": "259e31c4-74f6-4d58-9f76-c7b34d594473", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1218880601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f85ce3c02964d36a77221ba8235978c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e85cbc56-fee0-41f7-bc70-64f31775ce92", "external-id": "nsx-vlan-transportzone-793", "segmentation_id": 793, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13fb40b1-13", "ovs_interfaceid": "13fb40b1-132b-407d-b6e0-eec141ae88a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1581.208643] env[62510]: DEBUG oslo_vmware.api [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': task-1768684, 'name': Rename_Task, 'duration_secs': 0.157218} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1581.208878] env[62510]: DEBUG oslo_concurrency.lockutils [req-a481e112-c50f-4a21-9e8b-751d9ee62129 req-478f21e6-188f-43a0-baf5-198c96ebb233 service nova] Acquired lock "refresh_cache-e7daad63-c802-4a86-bead-7e849064ed61" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1581.209110] env[62510]: DEBUG nova.network.neutron [req-a481e112-c50f-4a21-9e8b-751d9ee62129 req-478f21e6-188f-43a0-baf5-198c96ebb233 service nova] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Refreshing network info cache for port 13fb40b1-132b-407d-b6e0-eec141ae88a8 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1581.210963] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3b:f9:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e85cbc56-fee0-41f7-bc70-64f31775ce92', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '13fb40b1-132b-407d-b6e0-eec141ae88a8', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1581.219255] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Creating folder: Project (3f85ce3c02964d36a77221ba8235978c). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1581.219571] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1581.220523] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-72fdc118-0d92-4a60-be17-59e41207eecb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.223871] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f267fbee-242a-4423-bfbb-cb73c4a12c4f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.232406] env[62510]: DEBUG oslo_vmware.api [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Waiting for the task: (returnval){ [ 1581.232406] env[62510]: value = "task-1768687" [ 1581.232406] env[62510]: _type = "Task" [ 1581.232406] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.238703] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Created folder: Project (3f85ce3c02964d36a77221ba8235978c) in parent group-v367197. 
[ 1581.238897] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Creating folder: Instances. Parent ref: group-v367330. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1581.239523] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d88d3b7f-78a7-4aa7-9445-989a23c22b80 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.243855] env[62510]: DEBUG oslo_vmware.api [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': task-1768687, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.254168] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Created folder: Instances in parent group-v367330. [ 1581.254505] env[62510]: DEBUG oslo.service.loopingcall [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1581.254752] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1581.255090] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fbb60e63-9df6-4fa7-a3ee-76222d9d7c23 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.278520] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1581.278520] env[62510]: value = "task-1768690" [ 1581.278520] env[62510]: _type = "Task" [ 1581.278520] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.290333] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768690, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.350928] env[62510]: DEBUG oslo_concurrency.lockutils [req-3353f85c-e919-4c38-83af-f5cd3102d244 req-4432a084-148c-412e-ac09-215762f0161f service nova] Acquiring lock "0029d975-bd48-4558-9f41-a0cf91336393" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1581.351264] env[62510]: DEBUG oslo_concurrency.lockutils [req-3353f85c-e919-4c38-83af-f5cd3102d244 req-4432a084-148c-412e-ac09-215762f0161f service nova] Acquired lock "0029d975-bd48-4558-9f41-a0cf91336393" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1581.352305] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8e91471-4d21-45b6-ba69-04cf46fd3931 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.376036] env[62510]: DEBUG oslo_concurrency.lockutils [req-3353f85c-e919-4c38-83af-f5cd3102d244 req-4432a084-148c-412e-ac09-215762f0161f service nova] Releasing lock "0029d975-bd48-4558-9f41-a0cf91336393" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1581.376375] env[62510]: WARNING nova.compute.manager [req-3353f85c-e919-4c38-83af-f5cd3102d244 req-4432a084-148c-412e-ac09-215762f0161f service nova] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Detach interface failed, port_id=37fc170f-1d32-4c6f-b871-74d459e02d76, reason: No device with interface-id 37fc170f-1d32-4c6f-b871-74d459e02d76 exists on VM: nova.exception.NotFound: No device with interface-id 37fc170f-1d32-4c6f-b871-74d459e02d76 exists on VM [ 1581.417469] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance d3e25d50-f315-439b-9e9f-8e454a0631d4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1581.466996] env[62510]: DEBUG oslo_vmware.api [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768685, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.640498] env[62510]: DEBUG oslo_vmware.api [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': task-1768686, 'name': Rename_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.743651] env[62510]: DEBUG oslo_vmware.api [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': task-1768687, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.790785] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768690, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.830132] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4239e95f-13fb-4ef3-a27e-023d5e043d61 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "refresh_cache-0029d975-bd48-4558-9f41-a0cf91336393" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1581.830449] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4239e95f-13fb-4ef3-a27e-023d5e043d61 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquired lock "refresh_cache-0029d975-bd48-4558-9f41-a0cf91336393" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1581.830782] env[62510]: DEBUG nova.network.neutron [None req-4239e95f-13fb-4ef3-a27e-023d5e043d61 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1581.920449] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 0d27da5c-20f3-4df1-86d2-036c904fd657 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1581.968244] env[62510]: DEBUG oslo_vmware.api [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768685, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.655854} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1581.968527] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 350d5f83-d9ce-4997-bf57-70c4a4e22ba0/350d5f83-d9ce-4997-bf57-70c4a4e22ba0.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1581.968709] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1581.968961] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-df3d3346-5223-46c6-b12a-9f8608907d0b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.977688] env[62510]: DEBUG oslo_vmware.api [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1581.977688] env[62510]: value = "task-1768691" [ 1581.977688] env[62510]: _type = "Task" [ 1581.977688] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.988561] env[62510]: DEBUG oslo_vmware.api [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768691, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.019218] env[62510]: DEBUG oslo_concurrency.lockutils [None req-497a6ef0-b6ec-435e-a80f-dca727a12e36 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "0029d975-bd48-4558-9f41-a0cf91336393" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1582.019486] env[62510]: DEBUG oslo_concurrency.lockutils [None req-497a6ef0-b6ec-435e-a80f-dca727a12e36 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "0029d975-bd48-4558-9f41-a0cf91336393" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1582.019706] env[62510]: DEBUG oslo_concurrency.lockutils [None req-497a6ef0-b6ec-435e-a80f-dca727a12e36 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "0029d975-bd48-4558-9f41-a0cf91336393-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1582.019886] env[62510]: DEBUG oslo_concurrency.lockutils [None req-497a6ef0-b6ec-435e-a80f-dca727a12e36 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "0029d975-bd48-4558-9f41-a0cf91336393-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1582.020113] env[62510]: DEBUG oslo_concurrency.lockutils [None req-497a6ef0-b6ec-435e-a80f-dca727a12e36 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "0029d975-bd48-4558-9f41-a0cf91336393-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1582.022376] env[62510]: INFO nova.compute.manager [None req-497a6ef0-b6ec-435e-a80f-dca727a12e36 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Terminating instance [ 1582.036812] env[62510]: DEBUG nova.network.neutron [req-a481e112-c50f-4a21-9e8b-751d9ee62129 req-478f21e6-188f-43a0-baf5-198c96ebb233 service nova] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Updated VIF entry in instance network info cache for port 13fb40b1-132b-407d-b6e0-eec141ae88a8. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1582.037264] env[62510]: DEBUG nova.network.neutron [req-a481e112-c50f-4a21-9e8b-751d9ee62129 req-478f21e6-188f-43a0-baf5-198c96ebb233 service nova] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Updating instance_info_cache with network_info: [{"id": "13fb40b1-132b-407d-b6e0-eec141ae88a8", "address": "fa:16:3e:3b:f9:de", "network": {"id": "259e31c4-74f6-4d58-9f76-c7b34d594473", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1218880601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f85ce3c02964d36a77221ba8235978c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e85cbc56-fee0-41f7-bc70-64f31775ce92", "external-id": "nsx-vlan-transportzone-793", "segmentation_id": 793, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13fb40b1-13", "ovs_interfaceid": "13fb40b1-132b-407d-b6e0-eec141ae88a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1582.143851] env[62510]: DEBUG oslo_vmware.api [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': task-1768686, 'name': Rename_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.244448] env[62510]: DEBUG oslo_vmware.api [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': task-1768687, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.290358] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768690, 'name': CreateVM_Task, 'duration_secs': 0.993162} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.290515] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1582.291167] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1582.291312] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1582.292020] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1582.292020] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b21a8cef-624e-4907-9e97-fcf3a4e82eb5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.297725] env[62510]: DEBUG oslo_vmware.api [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for the task: (returnval){ [ 1582.297725] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52e04823-f8bb-a4a3-67c2-e17052d9cff1" [ 1582.297725] env[62510]: _type = "Task" [ 1582.297725] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.306711] env[62510]: DEBUG oslo_vmware.api [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52e04823-f8bb-a4a3-67c2-e17052d9cff1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.423073] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1582.423316] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Migration 8d1549df-6eeb-4b96-9648-01fd9071d48d is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1582.423455] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance fae7e580-ab09-4fda-9cbe-0e066ddcb85c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1582.423761] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Total usable vcpus: 48, total allocated vcpus: 21 {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1582.423912] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4608MB phys_disk=200GB used_disk=20GB total_vcpus=48 used_vcpus=21 pci_stats=[] {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1582.492191] env[62510]: DEBUG oslo_vmware.api [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768691, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06602} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.492469] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1582.493327] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec9f7805-c1ec-4801-8479-ace0c134bbf1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.519588] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] 350d5f83-d9ce-4997-bf57-70c4a4e22ba0/350d5f83-d9ce-4997-bf57-70c4a4e22ba0.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1582.522472] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e76010af-3a00-4730-bcb1-5f6eb34a7b7d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.540382] env[62510]: DEBUG nova.compute.manager [None req-497a6ef0-b6ec-435e-a80f-dca727a12e36 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1582.540675] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-497a6ef0-b6ec-435e-a80f-dca727a12e36 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1582.541163] env[62510]: DEBUG oslo_concurrency.lockutils [req-a481e112-c50f-4a21-9e8b-751d9ee62129 req-478f21e6-188f-43a0-baf5-198c96ebb233 service nova] Releasing lock "refresh_cache-e7daad63-c802-4a86-bead-7e849064ed61" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1582.542121] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-354b74aa-2521-4e78-a8bf-fda1236eea94 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.553326] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-497a6ef0-b6ec-435e-a80f-dca727a12e36 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1582.554222] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-18083159-ce96-47ee-ad87-0c6826301bad {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.556317] env[62510]: DEBUG oslo_vmware.api [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1582.556317] env[62510]: value = "task-1768692" [ 1582.556317] env[62510]: _type = "Task" [ 1582.556317] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.561914] env[62510]: DEBUG oslo_vmware.api [None req-497a6ef0-b6ec-435e-a80f-dca727a12e36 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1582.561914] env[62510]: value = "task-1768693" [ 1582.561914] env[62510]: _type = "Task" [ 1582.561914] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.571046] env[62510]: DEBUG oslo_vmware.api [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768692, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.576914] env[62510]: DEBUG oslo_vmware.api [None req-497a6ef0-b6ec-435e-a80f-dca727a12e36 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768693, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.643657] env[62510]: DEBUG oslo_vmware.api [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': task-1768686, 'name': Rename_Task, 'duration_secs': 1.172535} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.646602] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1582.647145] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9b11b471-8acb-4207-a370-593082c3d5d3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.656675] env[62510]: DEBUG oslo_vmware.api [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Waiting for the task: (returnval){ [ 1582.656675] env[62510]: value = "task-1768694" [ 1582.656675] env[62510]: _type = "Task" [ 1582.656675] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.671240] env[62510]: DEBUG oslo_vmware.api [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': task-1768694, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.748336] env[62510]: DEBUG oslo_vmware.api [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': task-1768687, 'name': PowerOnVM_Task, 'duration_secs': 1.336542} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.748618] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1582.748834] env[62510]: INFO nova.compute.manager [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Took 8.39 seconds to spawn the instance on the hypervisor. 
[ 1582.749029] env[62510]: DEBUG nova.compute.manager [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1582.749928] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2fd6eb4-08e2-40bf-b6f6-642ad6fa9048 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.765421] env[62510]: DEBUG nova.network.neutron [None req-4239e95f-13fb-4ef3-a27e-023d5e043d61 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Updating instance_info_cache with network_info: [{"id": "a003d1ad-b7fa-4edc-a654-9a89e9533cbd", "address": "fa:16:3e:b2:5f:4c", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.139", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa003d1ad-b7", "ovs_interfaceid": "a003d1ad-b7fa-4edc-a654-9a89e9533cbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1582.810509] env[62510]: DEBUG oslo_vmware.api [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52e04823-f8bb-a4a3-67c2-e17052d9cff1, 'name': SearchDatastore_Task, 'duration_secs': 0.010544} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.810855] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1582.811104] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1582.811337] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1582.811901] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1582.811901] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1582.811901] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f1f9b51d-96bd-4387-abb5-4c74a5c7a835 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.838438] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1582.838715] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1582.839938] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38527abe-8a7a-4d69-abe5-1d58819a3521 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.847325] env[62510]: DEBUG oslo_vmware.api [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for the task: (returnval){ [ 1582.847325] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52204610-e002-6287-9d9e-9f992f56211a" [ 1582.847325] env[62510]: _type = "Task" [ 1582.847325] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.860130] env[62510]: DEBUG oslo_vmware.api [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52204610-e002-6287-9d9e-9f992f56211a, 'name': SearchDatastore_Task, 'duration_secs': 0.009558} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.863486] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0baf67d2-07e0-44e8-b6a5-57fbbf36cad7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.872050] env[62510]: DEBUG oslo_vmware.api [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for the task: (returnval){ [ 1582.872050] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]527f55aa-4e17-2d3f-c83a-d3b4361c3066" [ 1582.872050] env[62510]: _type = "Task" [ 1582.872050] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.881711] env[62510]: DEBUG oslo_vmware.api [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]527f55aa-4e17-2d3f-c83a-d3b4361c3066, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.012283] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d7673fe-06c7-4ccf-9ef8-23f08155e0d0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.020905] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91cde14e-6620-4036-b619-bd8d72d2020c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.052490] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c0deb7e-1400-4ae6-b4f7-003d09c41835 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.068218] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c878dea-c17f-41d8-9c20-5c7f3a0d252b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.075338] env[62510]: DEBUG oslo_vmware.api [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768692, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.081030] env[62510]: DEBUG oslo_vmware.api [None req-497a6ef0-b6ec-435e-a80f-dca727a12e36 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768693, 'name': PowerOffVM_Task, 'duration_secs': 0.269375} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.088910] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-497a6ef0-b6ec-435e-a80f-dca727a12e36 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1583.089112] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-497a6ef0-b6ec-435e-a80f-dca727a12e36 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1583.089581] env[62510]: DEBUG nova.compute.provider_tree [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1583.090931] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5ebd6331-4b9d-4384-a6b4-f3050e2b5e6a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.169548] env[62510]: DEBUG oslo_vmware.api [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': task-1768694, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.188921] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-497a6ef0-b6ec-435e-a80f-dca727a12e36 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1583.189164] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-497a6ef0-b6ec-435e-a80f-dca727a12e36 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1583.189435] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-497a6ef0-b6ec-435e-a80f-dca727a12e36 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Deleting the datastore file [datastore1] 0029d975-bd48-4558-9f41-a0cf91336393 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1583.189760] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7801209a-7773-43f5-8742-827ecfda6d58 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.197331] env[62510]: DEBUG oslo_vmware.api [None req-497a6ef0-b6ec-435e-a80f-dca727a12e36 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1583.197331] env[62510]: value = "task-1768696" [ 1583.197331] env[62510]: _type = "Task" [ 1583.197331] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.212345] env[62510]: DEBUG oslo_vmware.api [None req-497a6ef0-b6ec-435e-a80f-dca727a12e36 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768696, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.270453] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4239e95f-13fb-4ef3-a27e-023d5e043d61 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Releasing lock "refresh_cache-0029d975-bd48-4558-9f41-a0cf91336393" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1583.277145] env[62510]: INFO nova.compute.manager [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Took 37.03 seconds to build instance. [ 1583.382443] env[62510]: DEBUG oslo_vmware.api [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]527f55aa-4e17-2d3f-c83a-d3b4361c3066, 'name': SearchDatastore_Task, 'duration_secs': 0.009642} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.382443] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1583.382686] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] e7daad63-c802-4a86-bead-7e849064ed61/e7daad63-c802-4a86-bead-7e849064ed61.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1583.383060] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b1ed1824-e9d6-4bcd-945c-df6262e3b547 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.392035] env[62510]: DEBUG oslo_vmware.api [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for the task: (returnval){ [ 1583.392035] env[62510]: value = "task-1768697" [ 1583.392035] env[62510]: _type = "Task" [ 1583.392035] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.400574] env[62510]: DEBUG oslo_vmware.api [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768697, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.569358] env[62510]: DEBUG oslo_vmware.api [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768692, 'name': ReconfigVM_Task, 'duration_secs': 0.678675} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.569766] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Reconfigured VM instance instance-00000032 to attach disk [datastore1] 350d5f83-d9ce-4997-bf57-70c4a4e22ba0/350d5f83-d9ce-4997-bf57-70c4a4e22ba0.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1583.570530] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2b6006b1-34a3-4209-b3c4-c0b399d805bc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.579537] env[62510]: DEBUG oslo_vmware.api [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1583.579537] env[62510]: value = "task-1768698" [ 1583.579537] env[62510]: _type = "Task" [ 1583.579537] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.589457] env[62510]: DEBUG oslo_vmware.api [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768698, 'name': Rename_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.616683] env[62510]: ERROR nova.scheduler.client.report [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [req-2fb36550-4725-4d04-bb80-21abdbcfd765] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c3653102-341b-4ed1-8b1f-1abaf8aa3e56. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-2fb36550-4725-4d04-bb80-21abdbcfd765"}]} [ 1583.637578] env[62510]: DEBUG nova.scheduler.client.report [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Refreshing inventories for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1583.657660] env[62510]: DEBUG nova.scheduler.client.report [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Updating ProviderTree inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1583.657877] env[62510]: DEBUG nova.compute.provider_tree [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1583.674662] env[62510]: DEBUG oslo_vmware.api [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': task-1768694, 'name': PowerOnVM_Task, 'duration_secs': 0.604387} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.674662] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1583.675226] env[62510]: INFO nova.compute.manager [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Took 11.91 seconds to spawn the instance on the hypervisor. 
[ 1583.675226] env[62510]: DEBUG nova.compute.manager [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1583.676435] env[62510]: DEBUG nova.scheduler.client.report [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Refreshing aggregate associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, aggregates: None {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1583.679765] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6deac0f8-35f2-4167-bd2f-a6c99e1ec27a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.713380] env[62510]: DEBUG oslo_vmware.api [None req-497a6ef0-b6ec-435e-a80f-dca727a12e36 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768696, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.173091} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.713380] env[62510]: DEBUG nova.scheduler.client.report [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Refreshing trait associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1583.714183] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-497a6ef0-b6ec-435e-a80f-dca727a12e36 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1583.714426] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-497a6ef0-b6ec-435e-a80f-dca727a12e36 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1583.714658] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-497a6ef0-b6ec-435e-a80f-dca727a12e36 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1583.714836] env[62510]: INFO nova.compute.manager [None req-497a6ef0-b6ec-435e-a80f-dca727a12e36 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1583.715187] env[62510]: DEBUG oslo.service.loopingcall [None req-497a6ef0-b6ec-435e-a80f-dca727a12e36 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1583.715708] env[62510]: DEBUG nova.compute.manager [-] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1583.715814] env[62510]: DEBUG nova.network.neutron [-] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1583.777088] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4239e95f-13fb-4ef3-a27e-023d5e043d61 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "interface-0029d975-bd48-4558-9f41-a0cf91336393-37fc170f-1d32-4c6f-b871-74d459e02d76" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.912s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1583.781956] env[62510]: DEBUG oslo_concurrency.lockutils [None req-55e72727-cc8c-4ae6-b8f6-cd16f5f6db3d tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Lock "bd21dd81-c0d9-4ff1-9183-0b4622dc5afb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 72.677s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1583.905909] env[62510]: DEBUG oslo_vmware.api [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768697, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.089169] env[62510]: DEBUG oslo_vmware.api [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768698, 'name': Rename_Task, 'duration_secs': 0.21399} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1584.091501] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1584.091923] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f54f555c-b9c1-4d56-a87b-546494a7d1a0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.099330] env[62510]: DEBUG oslo_vmware.api [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1584.099330] env[62510]: value = "task-1768699" [ 1584.099330] env[62510]: _type = "Task" [ 1584.099330] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1584.111592] env[62510]: DEBUG oslo_vmware.api [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768699, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.205504] env[62510]: INFO nova.compute.manager [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Took 44.12 seconds to build instance. [ 1584.239139] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a7fa079-5eba-4b6d-8f2e-6a9b8d0819a1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.247548] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e868a268-17e8-450a-b8db-2fabe862c9c6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.279787] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2a85be4-41d3-476d-b7e4-f33529e671de {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.285121] env[62510]: DEBUG nova.compute.manager [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1584.291342] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de6077f2-3846-4d79-a127-fbb370188dc4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.295568] env[62510]: DEBUG oslo_concurrency.lockutils [None req-224361af-372b-499f-b410-55a610f161fd tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Acquiring lock "2c5c38c1-511f-4aae-969a-eb6de128fae7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1584.295794] env[62510]: DEBUG oslo_concurrency.lockutils [None req-224361af-372b-499f-b410-55a610f161fd tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Lock "2c5c38c1-511f-4aae-969a-eb6de128fae7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1584.296010] env[62510]: DEBUG oslo_concurrency.lockutils [None req-224361af-372b-499f-b410-55a610f161fd tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Acquiring lock "2c5c38c1-511f-4aae-969a-eb6de128fae7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1584.296194] env[62510]: DEBUG oslo_concurrency.lockutils [None req-224361af-372b-499f-b410-55a610f161fd tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Lock "2c5c38c1-511f-4aae-969a-eb6de128fae7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1584.296363] env[62510]: DEBUG oslo_concurrency.lockutils [None req-224361af-372b-499f-b410-55a610f161fd tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Lock "2c5c38c1-511f-4aae-969a-eb6de128fae7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1584.298392] env[62510]: INFO nova.compute.manager [None req-224361af-372b-499f-b410-55a610f161fd tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Terminating instance [ 1584.313185] env[62510]: DEBUG nova.compute.provider_tree [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1584.405571] env[62510]: DEBUG oslo_vmware.api [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768697, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.58606} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1584.405907] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] e7daad63-c802-4a86-bead-7e849064ed61/e7daad63-c802-4a86-bead-7e849064ed61.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1584.406320] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1584.406758] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b82e24f0-bca2-42d1-9eca-816be86af9e7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.415566] env[62510]: DEBUG oslo_vmware.api [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for the task: (returnval){ [ 1584.415566] env[62510]: value = "task-1768700" [ 1584.415566] env[62510]: _type = "Task" [ 1584.415566] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1584.427331] env[62510]: DEBUG oslo_vmware.api [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768700, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.613217] env[62510]: DEBUG oslo_vmware.api [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768699, 'name': PowerOnVM_Task} progress is 79%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.707063] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ec7e29d9-965a-4fba-bbd0-616005849e52 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Lock "e3850272-9dae-4164-8f0e-f5513af23f49" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.294s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1584.801581] env[62510]: DEBUG nova.compute.manager [None req-224361af-372b-499f-b410-55a610f161fd tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1584.801820] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-224361af-372b-499f-b410-55a610f161fd tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1584.804374] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44f5ef93-602e-4b92-8f97-8948e37fca69 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.811467] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1584.819467] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-224361af-372b-499f-b410-55a610f161fd tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1584.819956] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0b7ce919-547f-49c3-8203-962db056d794 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.829706] env[62510]: DEBUG oslo_vmware.api [None req-224361af-372b-499f-b410-55a610f161fd tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Waiting for the task: (returnval){ [ 1584.829706] env[62510]: value = "task-1768701" [ 1584.829706] env[62510]: _type = "Task" [ 1584.829706] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1584.841038] env[62510]: DEBUG oslo_vmware.api [None req-224361af-372b-499f-b410-55a610f161fd tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Task: {'id': task-1768701, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.854998] env[62510]: DEBUG nova.scheduler.client.report [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Updated inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with generation 72 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1584.854998] env[62510]: DEBUG nova.compute.provider_tree [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Updating resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 generation from 72 to 73 during operation: update_inventory {{(pid=62510) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1584.854998] env[62510]: DEBUG nova.compute.provider_tree [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1584.928211] env[62510]: DEBUG oslo_vmware.api [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768700, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.203477} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1584.928211] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1584.929198] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2251767-c463-4dc2-9d20-a1dd3a6d818a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.956496] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] e7daad63-c802-4a86-bead-7e849064ed61/e7daad63-c802-4a86-bead-7e849064ed61.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1584.956871] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bda41a78-6d56-4ae2-adc4-1616fd5a636d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.980085] env[62510]: DEBUG oslo_vmware.api [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for the task: (returnval){ [ 1584.980085] env[62510]: value = "task-1768702" [ 1584.980085] env[62510]: _type = "Task" [ 1584.980085] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1584.990472] env[62510]: DEBUG oslo_vmware.api [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768702, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.117165] env[62510]: DEBUG oslo_vmware.api [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768699, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.212808] env[62510]: DEBUG nova.compute.manager [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1585.326052] env[62510]: DEBUG nova.network.neutron [-] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1585.341859] env[62510]: DEBUG oslo_vmware.api [None req-224361af-372b-499f-b410-55a610f161fd tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Task: {'id': task-1768701, 'name': PowerOffVM_Task, 'duration_secs': 0.31862} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1585.345160] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-224361af-372b-499f-b410-55a610f161fd tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1585.345160] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-224361af-372b-499f-b410-55a610f161fd tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1585.345160] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4cba5ec2-2b2d-4648-bacd-809f8331ee7a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.358226] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62510) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1585.358577] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 8.199s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1585.358778] env[62510]: DEBUG oslo_concurrency.lockutils [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.548s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1585.362632] env[62510]: INFO nova.compute.claims [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1585.366224] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1585.366393] env[62510]: DEBUG nova.compute.manager [None 
req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Cleaning up deleted instances {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11609}} [ 1585.430789] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-224361af-372b-499f-b410-55a610f161fd tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1585.431120] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-224361af-372b-499f-b410-55a610f161fd tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1585.431213] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-224361af-372b-499f-b410-55a610f161fd tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Deleting the datastore file [datastore1] 2c5c38c1-511f-4aae-969a-eb6de128fae7 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1585.431481] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9fc8506d-b28c-4359-8db6-8688b7391508 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.444540] env[62510]: DEBUG oslo_vmware.api [None req-224361af-372b-499f-b410-55a610f161fd tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Waiting for the task: (returnval){ [ 1585.444540] env[62510]: value = "task-1768704" [ 1585.444540] env[62510]: _type = "Task" [ 1585.444540] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.454445] env[62510]: DEBUG oslo_vmware.api [None req-224361af-372b-499f-b410-55a610f161fd tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Task: {'id': task-1768704, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.495647] env[62510]: DEBUG oslo_vmware.api [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768702, 'name': ReconfigVM_Task, 'duration_secs': 0.407749} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1585.495946] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Reconfigured VM instance instance-00000033 to attach disk [datastore1] e7daad63-c802-4a86-bead-7e849064ed61/e7daad63-c802-4a86-bead-7e849064ed61.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1585.499075] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a4119a1e-f28c-4fcb-8b09-23765dc5eee3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.506951] env[62510]: DEBUG oslo_vmware.api [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for the task: (returnval){ [ 1585.506951] env[62510]: value = "task-1768705" [ 1585.506951] env[62510]: _type = "Task" [ 1585.506951] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.516024] env[62510]: DEBUG oslo_vmware.api [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768705, 'name': Rename_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.612531] env[62510]: DEBUG oslo_vmware.api [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768699, 'name': PowerOnVM_Task, 'duration_secs': 1.055915} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1585.612805] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1585.613023] env[62510]: INFO nova.compute.manager [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Took 9.14 seconds to spawn the instance on the hypervisor. 
[ 1585.613239] env[62510]: DEBUG nova.compute.manager [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1585.614090] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd1cd92d-4e83-41db-831a-4de24b5009ec {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.737863] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1585.829032] env[62510]: INFO nova.compute.manager [-] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Took 2.11 seconds to deallocate network for instance. [ 1585.879026] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] There are 23 instances to clean {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11618}} [ 1585.879858] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 2d2ab209-8072-4e64-8170-50d96d71bc54] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1585.955638] env[62510]: DEBUG oslo_vmware.api [None req-224361af-372b-499f-b410-55a610f161fd tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Task: {'id': task-1768704, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145233} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1585.955922] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-224361af-372b-499f-b410-55a610f161fd tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1585.956155] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-224361af-372b-499f-b410-55a610f161fd tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1585.956343] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-224361af-372b-499f-b410-55a610f161fd tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1585.956547] env[62510]: INFO nova.compute.manager [None req-224361af-372b-499f-b410-55a610f161fd tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Took 1.15 seconds to destroy the instance on the hypervisor. 
[ 1585.956816] env[62510]: DEBUG oslo.service.loopingcall [None req-224361af-372b-499f-b410-55a610f161fd tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1585.957049] env[62510]: DEBUG nova.compute.manager [-] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1585.957163] env[62510]: DEBUG nova.network.neutron [-] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1586.023929] env[62510]: DEBUG oslo_vmware.api [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768705, 'name': Rename_Task, 'duration_secs': 0.316509} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.025803] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1586.026982] env[62510]: DEBUG nova.compute.manager [req-2f907d44-08a5-463b-9496-6bde4f01df2a req-e971dd00-43f7-4d28-8c63-900cee44448e service nova] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Received event network-vif-deleted-a003d1ad-b7fa-4edc-a654-9a89e9533cbd {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1586.027291] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4b33393b-6753-4291-9481-f67a303f5557 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.035144] env[62510]: DEBUG oslo_vmware.api [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for the task: (returnval){ [ 1586.035144] env[62510]: value = "task-1768706" [ 1586.035144] env[62510]: _type = "Task" [ 1586.035144] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.044923] env[62510]: DEBUG oslo_vmware.api [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768706, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.132225] env[62510]: INFO nova.compute.manager [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Took 38.90 seconds to build instance. 
[ 1586.336601] env[62510]: DEBUG oslo_concurrency.lockutils [None req-497a6ef0-b6ec-435e-a80f-dca727a12e36 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1586.387287] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: a09a34de-fe7c-414b-8a89-2e9271c72a5c] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1586.547628] env[62510]: DEBUG oslo_vmware.api [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768706, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.634040] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bad7c2fe-7914-4a1d-92c3-86873638954c tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "350d5f83-d9ce-4997-bf57-70c4a4e22ba0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.456s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1586.875768] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65e06bf0-55fc-4493-84d6-8aca6f2b982c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.884488] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0405ba27-b987-4697-97c7-88938c40dff6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.890481] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: e9711202-67f3-4fe2-befb-f28722ddea33] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1586.927314] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 0158d7af-d3bb-4d9c-a7c6-fbab943977e2] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1586.929945] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-825ac059-4cce-4f00-8606-91c47007072f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.939972] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daa6ab95-b6a3-49b3-9b02-ff7e337702b8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.956989] env[62510]: DEBUG nova.compute.provider_tree [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1586.959065] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquiring lock "83fa0d32-18ee-401d-af0b-a0adb538e5f4" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1586.959441] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lock "83fa0d32-18ee-401d-af0b-a0adb538e5f4" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1586.960784] env[62510]: INFO nova.compute.manager [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Shelving [ 1587.047427] env[62510]: DEBUG oslo_vmware.api [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768706, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.294619] env[62510]: DEBUG nova.network.neutron [-] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1587.434544] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 8e3cefa1-fab9-469e-8a32-31b4a8ecf4be] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1587.463304] env[62510]: DEBUG nova.scheduler.client.report [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1587.548013] env[62510]: DEBUG oslo_vmware.api [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768706, 'name': PowerOnVM_Task, 'duration_secs': 1.204983} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.548013] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1587.548144] env[62510]: INFO nova.compute.manager [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Took 8.36 seconds to spawn the instance on the hypervisor. [ 1587.548230] env[62510]: DEBUG nova.compute.manager [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1587.549025] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acddbaf9-2f28-4432-a4f7-a24219c677bf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.800019] env[62510]: INFO nova.compute.manager [-] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Took 1.84 seconds to deallocate network for instance. [ 1587.938230] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: aca56820-5a06-43dd-9d98-25421f7ef6a6] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1587.965752] env[62510]: DEBUG oslo_concurrency.lockutils [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.607s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1587.966421] env[62510]: DEBUG nova.compute.manager [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1587.970251] env[62510]: DEBUG oslo_concurrency.lockutils [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.036s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1587.971999] env[62510]: INFO nova.compute.claims [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1587.975935] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1587.976280] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-74bc262b-7c81-4a66-a81b-5dfa350b752f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.987430] env[62510]: DEBUG oslo_vmware.api [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1587.987430] env[62510]: value = "task-1768707" [ 1587.987430] env[62510]: _type = "Task" [ 1587.987430] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.999080] env[62510]: DEBUG oslo_vmware.api [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1768707, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.069038] env[62510]: INFO nova.compute.manager [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Took 38.59 seconds to build instance. 
[ 1588.308781] env[62510]: DEBUG oslo_concurrency.lockutils [None req-224361af-372b-499f-b410-55a610f161fd tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1588.441770] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: a040671e-941d-4406-81af-f2f7a4b690e4] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1588.479407] env[62510]: DEBUG nova.compute.utils [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1588.483995] env[62510]: DEBUG nova.compute.manager [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1588.483995] env[62510]: DEBUG nova.network.neutron [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1588.499077] env[62510]: DEBUG oslo_vmware.api [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1768707, 'name': PowerOffVM_Task, 'duration_secs': 0.350947} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.499354] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1588.500494] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46503e64-cb8f-4e68-8c2d-7a5d9037f3e9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.522432] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4690465a-aeb0-4aa0-b530-0492e20e9686 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.557527] env[62510]: DEBUG nova.compute.manager [req-2c2d4ff1-66c6-45ef-98b6-38d20dd11478 req-743f7339-8841-458d-b7b0-51fabf02841c service nova] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Received event network-changed-b93d3484-b909-4060-aef6-1f45f91f2325 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1588.557872] env[62510]: DEBUG nova.compute.manager [req-2c2d4ff1-66c6-45ef-98b6-38d20dd11478 req-743f7339-8841-458d-b7b0-51fabf02841c service nova] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Refreshing instance network info cache due to event network-changed-b93d3484-b909-4060-aef6-1f45f91f2325. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1588.558025] env[62510]: DEBUG oslo_concurrency.lockutils [req-2c2d4ff1-66c6-45ef-98b6-38d20dd11478 req-743f7339-8841-458d-b7b0-51fabf02841c service nova] Acquiring lock "refresh_cache-b004fba7-13e0-40f0-827d-8d09b7717176" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1588.558349] env[62510]: DEBUG oslo_concurrency.lockutils [req-2c2d4ff1-66c6-45ef-98b6-38d20dd11478 req-743f7339-8841-458d-b7b0-51fabf02841c service nova] Acquired lock "refresh_cache-b004fba7-13e0-40f0-827d-8d09b7717176" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1588.558349] env[62510]: DEBUG nova.network.neutron [req-2c2d4ff1-66c6-45ef-98b6-38d20dd11478 req-743f7339-8841-458d-b7b0-51fabf02841c service nova] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Refreshing network info cache for port b93d3484-b909-4060-aef6-1f45f91f2325 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1588.570045] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cf5af7c1-eda8-4084-9906-23ee1a6607a9 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Lock "e7daad63-c802-4a86-bead-7e849064ed61" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.533s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1588.577487] env[62510]: DEBUG nova.policy [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Policy check for network:attach_external_network failed with credentials 
{'is_admin': False, 'user_id': '525928ba40cb4cebb1e1d9e25adeae8d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '98d6c12eccf74757b3cbc2c8acddeb19', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1588.946771] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 0a940fd0-73cc-403d-9afc-a989c67dfdef] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1588.981696] env[62510]: DEBUG nova.compute.utils [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1589.036413] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Creating Snapshot of the VM instance {{(pid=62510) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1589.036721] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-2592b457-fb95-4fb0-b338-8e54bb0c2fa7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.046111] env[62510]: DEBUG oslo_vmware.api [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1589.046111] env[62510]: value = "task-1768708" [ 1589.046111] env[62510]: _type = "Task" [ 1589.046111] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.054625] env[62510]: DEBUG oslo_vmware.api [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1768708, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.339556] env[62510]: DEBUG nova.network.neutron [req-2c2d4ff1-66c6-45ef-98b6-38d20dd11478 req-743f7339-8841-458d-b7b0-51fabf02841c service nova] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Updated VIF entry in instance network info cache for port b93d3484-b909-4060-aef6-1f45f91f2325. 
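The "Policy check for network:attach_external_network failed" entry just above is an expected denial: the request credentials carry only the reader and member roles, while attaching an external network is an admin-level action. The snippet below is a minimal, self-contained illustration of such a check with oslo.policy; the "role:admin" rule string is an assumption chosen for the example, not a quote of Nova's actual default policy.

from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
enforcer.register_default(
    policy.RuleDefault("network:attach_external_network", "role:admin"))

# Credentials shaped like the ones logged above (roles only include
# reader/member, no admin role).
creds = {"roles": ["reader", "member"],
         "project_id": "98d6c12eccf74757b3cbc2c8acddeb19"}
print(enforcer.enforce("network:attach_external_network", {}, creds))
# -> False, i.e. the "Policy check ... failed" DEBUG line above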
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1589.339910] env[62510]: DEBUG nova.network.neutron [req-2c2d4ff1-66c6-45ef-98b6-38d20dd11478 req-743f7339-8841-458d-b7b0-51fabf02841c service nova] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Updating instance_info_cache with network_info: [{"id": "b93d3484-b909-4060-aef6-1f45f91f2325", "address": "fa:16:3e:db:60:53", "network": {"id": "457a7f06-5f1c-485e-8589-43d0e40d3fc5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2119303260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c995c2427bd4f7da644d0a8df7d69da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2153f70-3d14-42ab-8bb3-be78296dd3b8", "external-id": "nsx-vlan-transportzone-532", "segmentation_id": 532, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb93d3484-b9", "ovs_interfaceid": "b93d3484-b909-4060-aef6-1f45f91f2325", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1589.445930] env[62510]: DEBUG nova.network.neutron [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Successfully created port: 9a53a8d4-8b7d-4167-b888-f20b2fce23c5 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1589.451111] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: d42295c9-2b0e-471e-9a87-1d7367de9588] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1589.491117] env[62510]: DEBUG nova.compute.manager [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1589.555876] env[62510]: DEBUG oslo_vmware.api [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1768708, 'name': CreateSnapshot_Task} progress is 100%. 
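The instance_info_cache entry above is the cached Neutron view of one VIF: port id, MAC address, subnets with fixed IPs, MTU and the OVS binding details. A small reader for exactly that dict shape, handy when pulling these caches out of logs, might look like the following; it assumes only the keys visible above and is not the nova.network.model API.

def summarize_vif(vif):
    # vif is one element of a network_info list like the one shown above
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]
                 if ip.get("type") == "fixed"]
    return {"port_id": vif["id"],
            "mac": vif["address"],
            "mtu": vif["network"]["meta"].get("mtu"),
            "ovs_interfaceid": vif.get("ovs_interfaceid"),
            "fixed_ips": fixed_ips}

# For the entry above this yields port b93d3484-b909-4060-aef6-1f45f91f2325,
# MAC fa:16:3e:db:60:53, MTU 8950 and fixed IP 192.168.128.14.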
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.557507] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40e957f9-4a3e-4978-8b08-2b7bcf4ddab3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.565089] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c65427e5-84ff-42b1-86fc-fb59837955d7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.603474] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae50d735-df86-4db0-8990-79824775f228 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.613311] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7ede9e3-44a0-4a0a-85d2-d8911dfcd91f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.629104] env[62510]: DEBUG nova.compute.provider_tree [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1589.844374] env[62510]: DEBUG oslo_concurrency.lockutils [req-2c2d4ff1-66c6-45ef-98b6-38d20dd11478 req-743f7339-8841-458d-b7b0-51fabf02841c service nova] Releasing lock "refresh_cache-b004fba7-13e0-40f0-827d-8d09b7717176" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1589.844768] env[62510]: DEBUG nova.compute.manager [req-2c2d4ff1-66c6-45ef-98b6-38d20dd11478 req-743f7339-8841-458d-b7b0-51fabf02841c service nova] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Received event network-changed-d769f744-6168-49b4-b195-b608fec386e8 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1589.844988] env[62510]: DEBUG nova.compute.manager [req-2c2d4ff1-66c6-45ef-98b6-38d20dd11478 req-743f7339-8841-458d-b7b0-51fabf02841c service nova] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Refreshing instance network info cache due to event network-changed-d769f744-6168-49b4-b195-b608fec386e8. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1589.845262] env[62510]: DEBUG oslo_concurrency.lockutils [req-2c2d4ff1-66c6-45ef-98b6-38d20dd11478 req-743f7339-8841-458d-b7b0-51fabf02841c service nova] Acquiring lock "refresh_cache-e3850272-9dae-4164-8f0e-f5513af23f49" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1589.845664] env[62510]: DEBUG oslo_concurrency.lockutils [req-2c2d4ff1-66c6-45ef-98b6-38d20dd11478 req-743f7339-8841-458d-b7b0-51fabf02841c service nova] Acquired lock "refresh_cache-e3850272-9dae-4164-8f0e-f5513af23f49" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1589.845884] env[62510]: DEBUG nova.network.neutron [req-2c2d4ff1-66c6-45ef-98b6-38d20dd11478 req-743f7339-8841-458d-b7b0-51fabf02841c service nova] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Refreshing network info cache for port d769f744-6168-49b4-b195-b608fec386e8 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1589.947438] env[62510]: DEBUG oslo_concurrency.lockutils [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Acquiring lock "3df19233-2448-4030-ae1d-a4f98ccffba9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1589.948015] env[62510]: DEBUG oslo_concurrency.lockutils [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Lock "3df19233-2448-4030-ae1d-a4f98ccffba9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1589.954314] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: cfe53f9c-d78b-4af7-b991-f3549c03f22d] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1590.057591] env[62510]: DEBUG oslo_vmware.api [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1768708, 'name': CreateSnapshot_Task, 'duration_secs': 0.946766} completed successfully. 
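Entries such as Task: {'id': task-1768708, 'name': CreateSnapshot_Task} progress is 0%, followed later by 'duration_secs': 0.946766 ... completed successfully, reflect the generic oslo.vmware pattern: every vCenter call returns a Task object that the caller polls until it reaches a terminal state. The loop below is only a simplified illustration of that idea under stated assumptions (get_task_info is a hypothetical callable; the real polling lives in oslo_vmware.api and runs on a configurable interval).

import time

POLL_INTERVAL = 0.5  # seconds; assumed value, the real interval is configurable

def wait_for_task(get_task_info, timeout=300.0):
    # get_task_info is a hypothetical callable returning an object with
    # .state in {'running', 'success', 'error'}, .progress and .error.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state == 'success':
            return info                     # the log then records duration_secs
        if info.state == 'error':
            raise RuntimeError(info.error)  # surfaced as a failed task
        # corresponds to the "progress is N%" DEBUG lines above
        print(f"Task progress is {info.progress}%")
        time.sleep(POLL_INTERVAL)
    raise TimeoutError("task did not complete within %.0fs" % timeout)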
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.057865] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Created Snapshot of the VM instance {{(pid=62510) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1590.058627] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f968c4c8-bd28-4dca-812f-56b40b7176e9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.132991] env[62510]: DEBUG nova.scheduler.client.report [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1590.454481] env[62510]: DEBUG nova.compute.manager [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1590.461026] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 9a1a0428-8ccd-4614-8853-ef3eeec23d55] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1590.500540] env[62510]: DEBUG nova.compute.manager [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Start spawning the instance on the hypervisor. 
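The inventory report above (VCPU total 48 with allocation_ratio 4.0, MEMORY_MB total 196590 with 512 reserved, DISK_GB total 400) is what Placement uses to bound scheduling on this node: usable capacity per resource class is (total - reserved) * allocation_ratio, with min_unit/max_unit/step_size constraining each individual allocation. A quick worked check of those numbers:

# Worked example using the inventory data logged above.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(rc, capacity)
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0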
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1590.534614] env[62510]: DEBUG nova.virt.hardware [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:34:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='1073076194',id=20,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-97347246',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1590.535395] env[62510]: DEBUG nova.virt.hardware [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1590.535395] env[62510]: DEBUG nova.virt.hardware [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1590.535395] env[62510]: DEBUG nova.virt.hardware [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1590.535395] env[62510]: DEBUG nova.virt.hardware [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1590.535604] env[62510]: DEBUG nova.virt.hardware [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1590.535714] env[62510]: DEBUG nova.virt.hardware [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1590.535896] env[62510]: DEBUG nova.virt.hardware [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 
tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1590.536055] env[62510]: DEBUG nova.virt.hardware [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1590.536222] env[62510]: DEBUG nova.virt.hardware [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1590.536421] env[62510]: DEBUG nova.virt.hardware [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1590.537448] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-843d15bd-8f6b-43b4-933a-2476d0f99e90 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.546972] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa8741fa-4400-45da-a27b-d8d0e7df46d4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.576237] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Creating linked-clone VM from snapshot {{(pid=62510) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1590.576579] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-613caaab-6455-4f21-870b-e13c5b9ecab0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.586870] env[62510]: DEBUG oslo_vmware.api [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1590.586870] env[62510]: value = "task-1768709" [ 1590.586870] env[62510]: _type = "Task" [ 1590.586870] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.596701] env[62510]: DEBUG oslo_vmware.api [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1768709, 'name': CloneVM_Task} progress is 0%. 
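The nova.virt.hardware entries above go from flavor and image limits of 0:0:0 (no constraints, so the 65536 sockets/cores/threads defaults apply) to a single possible topology of 1 socket, 1 core, 1 thread for this 1-vCPU flavor. The sketch below illustrates the underlying idea, enumerating sockets*cores*threads factorizations of the vCPU count within the maxima; it is a simplification for illustration, not Nova's exact _get_possible_cpu_topologies code.

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    found.append((sockets, cores, threads))
    return found

print(possible_topologies(1))  # [(1, 1, 1)] -- the single 1:1:1 topology logged above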
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.599939] env[62510]: DEBUG nova.network.neutron [req-2c2d4ff1-66c6-45ef-98b6-38d20dd11478 req-743f7339-8841-458d-b7b0-51fabf02841c service nova] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Updated VIF entry in instance network info cache for port d769f744-6168-49b4-b195-b608fec386e8. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1590.600135] env[62510]: DEBUG nova.network.neutron [req-2c2d4ff1-66c6-45ef-98b6-38d20dd11478 req-743f7339-8841-458d-b7b0-51fabf02841c service nova] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Updating instance_info_cache with network_info: [{"id": "d769f744-6168-49b4-b195-b608fec386e8", "address": "fa:16:3e:4e:6e:a6", "network": {"id": "f8a73c35-302b-46f2-9c55-63efc73eee95", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1580802535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ca23491c4194bee84d0e9be0b015342", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd769f744-61", "ovs_interfaceid": "d769f744-6168-49b4-b195-b608fec386e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1590.639058] env[62510]: DEBUG oslo_concurrency.lockutils [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.668s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1590.639058] env[62510]: DEBUG nova.compute.manager [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1590.641738] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e2b1b05d-7c32-4aa8-87aa-db4a5f32e3c6 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.680s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1590.642062] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e2b1b05d-7c32-4aa8-87aa-db4a5f32e3c6 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1590.644555] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e41b9908-a43d-436e-b104-a3be7f7188cd tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.915s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1590.644854] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e41b9908-a43d-436e-b104-a3be7f7188cd tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1590.646642] env[62510]: DEBUG oslo_concurrency.lockutils [None req-615cb2c1-1c96-4930-8dea-7e7662d10cce tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.357s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1590.646834] env[62510]: DEBUG oslo_concurrency.lockutils [None req-615cb2c1-1c96-4930-8dea-7e7662d10cce tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1590.648919] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.787s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1590.650386] env[62510]: INFO nova.compute.claims [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1590.675692] env[62510]: INFO nova.scheduler.client.report [None 
req-615cb2c1-1c96-4930-8dea-7e7662d10cce tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Deleted allocations for instance f9eb5110-28ec-474e-b80e-0bfcee51483d [ 1590.680594] env[62510]: INFO nova.scheduler.client.report [None req-e41b9908-a43d-436e-b104-a3be7f7188cd tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Deleted allocations for instance 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb [ 1590.705243] env[62510]: INFO nova.scheduler.client.report [None req-e2b1b05d-7c32-4aa8-87aa-db4a5f32e3c6 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Deleted allocations for instance 2c5d137d-4fd5-4035-a04f-bdb76e90edd7 [ 1590.963084] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 26b283b0-98b4-4a15-abe0-fbf97e1f49eb] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1590.986818] env[62510]: DEBUG oslo_concurrency.lockutils [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1591.100843] env[62510]: DEBUG oslo_vmware.api [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1768709, 'name': CloneVM_Task} progress is 94%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.103420] env[62510]: DEBUG oslo_concurrency.lockutils [req-2c2d4ff1-66c6-45ef-98b6-38d20dd11478 req-743f7339-8841-458d-b7b0-51fabf02841c service nova] Releasing lock "refresh_cache-e3850272-9dae-4164-8f0e-f5513af23f49" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1591.103910] env[62510]: DEBUG nova.compute.manager [req-2c2d4ff1-66c6-45ef-98b6-38d20dd11478 req-743f7339-8841-458d-b7b0-51fabf02841c service nova] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Received event network-vif-deleted-9d4d0ed7-cab1-4f7d-9eda-faa60a248129 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1591.104350] env[62510]: DEBUG nova.compute.manager [req-2c2d4ff1-66c6-45ef-98b6-38d20dd11478 req-743f7339-8841-458d-b7b0-51fabf02841c service nova] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Received event network-changed-d769f744-6168-49b4-b195-b608fec386e8 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1591.104724] env[62510]: DEBUG nova.compute.manager [req-2c2d4ff1-66c6-45ef-98b6-38d20dd11478 req-743f7339-8841-458d-b7b0-51fabf02841c service nova] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Refreshing instance network info cache due to event network-changed-d769f744-6168-49b4-b195-b608fec386e8. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1591.105141] env[62510]: DEBUG oslo_concurrency.lockutils [req-2c2d4ff1-66c6-45ef-98b6-38d20dd11478 req-743f7339-8841-458d-b7b0-51fabf02841c service nova] Acquiring lock "refresh_cache-e3850272-9dae-4164-8f0e-f5513af23f49" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1591.105467] env[62510]: DEBUG oslo_concurrency.lockutils [req-2c2d4ff1-66c6-45ef-98b6-38d20dd11478 req-743f7339-8841-458d-b7b0-51fabf02841c service nova] Acquired lock "refresh_cache-e3850272-9dae-4164-8f0e-f5513af23f49" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1591.105816] env[62510]: DEBUG nova.network.neutron [req-2c2d4ff1-66c6-45ef-98b6-38d20dd11478 req-743f7339-8841-458d-b7b0-51fabf02841c service nova] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Refreshing network info cache for port d769f744-6168-49b4-b195-b608fec386e8 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1591.155877] env[62510]: DEBUG nova.compute.utils [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1591.162409] env[62510]: DEBUG nova.compute.manager [req-ed9992bf-40dc-42f4-82b1-c7c2b58ce3be req-d551868a-215f-4a30-a63e-6930aee876c0 service nova] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Received event network-changed-766401c7-3f55-48f7-a695-d2db7a829ade {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1591.162409] env[62510]: DEBUG nova.compute.manager [req-ed9992bf-40dc-42f4-82b1-c7c2b58ce3be req-d551868a-215f-4a30-a63e-6930aee876c0 service nova] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Refreshing instance network info cache due to event network-changed-766401c7-3f55-48f7-a695-d2db7a829ade. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1591.162409] env[62510]: DEBUG oslo_concurrency.lockutils [req-ed9992bf-40dc-42f4-82b1-c7c2b58ce3be req-d551868a-215f-4a30-a63e-6930aee876c0 service nova] Acquiring lock "refresh_cache-bd21dd81-c0d9-4ff1-9183-0b4622dc5afb" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1591.162409] env[62510]: DEBUG oslo_concurrency.lockutils [req-ed9992bf-40dc-42f4-82b1-c7c2b58ce3be req-d551868a-215f-4a30-a63e-6930aee876c0 service nova] Acquired lock "refresh_cache-bd21dd81-c0d9-4ff1-9183-0b4622dc5afb" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1591.162409] env[62510]: DEBUG nova.network.neutron [req-ed9992bf-40dc-42f4-82b1-c7c2b58ce3be req-d551868a-215f-4a30-a63e-6930aee876c0 service nova] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Refreshing network info cache for port 766401c7-3f55-48f7-a695-d2db7a829ade {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1591.162409] env[62510]: DEBUG nova.compute.manager [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] Not allocating networking since 'none' was specified. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1591.201490] env[62510]: DEBUG oslo_concurrency.lockutils [None req-615cb2c1-1c96-4930-8dea-7e7662d10cce tempest-ServerTagsTestJSON-1558018952 tempest-ServerTagsTestJSON-1558018952-project-member] Lock "f9eb5110-28ec-474e-b80e-0bfcee51483d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.012s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1591.206033] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e41b9908-a43d-436e-b104-a3be7f7188cd tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Lock "4f9bfb02-8aea-45a9-85ea-97e70f0d41fb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.622s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1591.216341] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e2b1b05d-7c32-4aa8-87aa-db4a5f32e3c6 tempest-AttachInterfacesV270Test-564870273 tempest-AttachInterfacesV270Test-564870273-project-member] Lock "2c5d137d-4fd5-4035-a04f-bdb76e90edd7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.203s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1591.468315] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: c58184e7-bf4f-406b-a778-9b8f60740fe6] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1591.484727] env[62510]: DEBUG nova.network.neutron [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Successfully updated port: 9a53a8d4-8b7d-4167-b888-f20b2fce23c5 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1591.598930] env[62510]: DEBUG oslo_vmware.api [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1768709, 'name': CloneVM_Task} progress is 94%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.667593] env[62510]: DEBUG nova.compute.manager [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] Start building block device mappings for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1591.730593] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dc36641a-cd6d-4db0-810b-e1128fe0d78f tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Acquiring lock "bd21dd81-c0d9-4ff1-9183-0b4622dc5afb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1591.731466] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dc36641a-cd6d-4db0-810b-e1128fe0d78f tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Lock "bd21dd81-c0d9-4ff1-9183-0b4622dc5afb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1591.731746] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dc36641a-cd6d-4db0-810b-e1128fe0d78f tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Acquiring lock "bd21dd81-c0d9-4ff1-9183-0b4622dc5afb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1591.731904] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dc36641a-cd6d-4db0-810b-e1128fe0d78f tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Lock "bd21dd81-c0d9-4ff1-9183-0b4622dc5afb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1591.732115] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dc36641a-cd6d-4db0-810b-e1128fe0d78f tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Lock "bd21dd81-c0d9-4ff1-9183-0b4622dc5afb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1591.734401] env[62510]: INFO nova.compute.manager [None req-dc36641a-cd6d-4db0-810b-e1128fe0d78f tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Terminating instance [ 1591.972678] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 7490c825-dfd5-409c-9fd6-0e78643338fb] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1591.987815] env[62510]: DEBUG oslo_concurrency.lockutils [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Acquiring lock "refresh_cache-0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1591.987815] env[62510]: DEBUG oslo_concurrency.lockutils [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 
tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Acquired lock "refresh_cache-0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1591.987815] env[62510]: DEBUG nova.network.neutron [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1592.011910] env[62510]: DEBUG nova.network.neutron [req-ed9992bf-40dc-42f4-82b1-c7c2b58ce3be req-d551868a-215f-4a30-a63e-6930aee876c0 service nova] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Updated VIF entry in instance network info cache for port 766401c7-3f55-48f7-a695-d2db7a829ade. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1592.011910] env[62510]: DEBUG nova.network.neutron [req-ed9992bf-40dc-42f4-82b1-c7c2b58ce3be req-d551868a-215f-4a30-a63e-6930aee876c0 service nova] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Updating instance_info_cache with network_info: [{"id": "766401c7-3f55-48f7-a695-d2db7a829ade", "address": "fa:16:3e:c4:51:1d", "network": {"id": "457a7f06-5f1c-485e-8589-43d0e40d3fc5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2119303260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c995c2427bd4f7da644d0a8df7d69da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2153f70-3d14-42ab-8bb3-be78296dd3b8", "external-id": "nsx-vlan-transportzone-532", "segmentation_id": 532, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap766401c7-3f", "ovs_interfaceid": "766401c7-3f55-48f7-a695-d2db7a829ade", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1592.107647] env[62510]: DEBUG oslo_vmware.api [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1768709, 'name': CloneVM_Task} progress is 95%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.126099] env[62510]: DEBUG nova.network.neutron [req-2c2d4ff1-66c6-45ef-98b6-38d20dd11478 req-743f7339-8841-458d-b7b0-51fabf02841c service nova] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Updated VIF entry in instance network info cache for port d769f744-6168-49b4-b195-b608fec386e8. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1592.129240] env[62510]: DEBUG nova.network.neutron [req-2c2d4ff1-66c6-45ef-98b6-38d20dd11478 req-743f7339-8841-458d-b7b0-51fabf02841c service nova] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Updating instance_info_cache with network_info: [{"id": "d769f744-6168-49b4-b195-b608fec386e8", "address": "fa:16:3e:4e:6e:a6", "network": {"id": "f8a73c35-302b-46f2-9c55-63efc73eee95", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1580802535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ca23491c4194bee84d0e9be0b015342", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd769f744-61", "ovs_interfaceid": "d769f744-6168-49b4-b195-b608fec386e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1592.159941] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3b9a889c-1523-4f8a-954c-799bb67b2b97 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Acquiring lock "b7c2c768-573b-4c1c-ade7-45fb87b95d41" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1592.160273] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3b9a889c-1523-4f8a-954c-799bb67b2b97 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Lock "b7c2c768-573b-4c1c-ade7-45fb87b95d41" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1592.160613] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3b9a889c-1523-4f8a-954c-799bb67b2b97 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Acquiring lock "b7c2c768-573b-4c1c-ade7-45fb87b95d41-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1592.160799] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3b9a889c-1523-4f8a-954c-799bb67b2b97 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Lock "b7c2c768-573b-4c1c-ade7-45fb87b95d41-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1592.160978] env[62510]: DEBUG oslo_concurrency.lockutils [None 
req-3b9a889c-1523-4f8a-954c-799bb67b2b97 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Lock "b7c2c768-573b-4c1c-ade7-45fb87b95d41-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1592.165981] env[62510]: INFO nova.compute.manager [None req-3b9a889c-1523-4f8a-954c-799bb67b2b97 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Terminating instance [ 1592.186345] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f80f456d-308e-461c-a345-fd9d5565f233 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.196387] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc8d4b51-05f6-4b34-9a3f-efeeef1e0fc2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.239552] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3517d777-269a-409e-a6ec-2cbbadb5abd0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.243877] env[62510]: DEBUG nova.compute.manager [None req-dc36641a-cd6d-4db0-810b-e1128fe0d78f tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1592.244127] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-dc36641a-cd6d-4db0-810b-e1128fe0d78f tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1592.244937] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31ca7647-868f-4ff2-9e87-dce304cbf178 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.255575] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97b4941f-8bc8-4860-be27-6443a976eb2a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.260010] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc36641a-cd6d-4db0-810b-e1128fe0d78f tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1592.260278] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6b259949-215c-4777-ae0f-696154754d06 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.273596] env[62510]: DEBUG nova.compute.provider_tree [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1592.277511] env[62510]: DEBUG oslo_vmware.api [None req-dc36641a-cd6d-4db0-810b-e1128fe0d78f tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Waiting for the task: (returnval){ [ 1592.277511] env[62510]: value = "task-1768710" [ 1592.277511] env[62510]: _type = "Task" [ 1592.277511] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1592.287951] env[62510]: DEBUG oslo_vmware.api [None req-dc36641a-cd6d-4db0-810b-e1128fe0d78f tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': task-1768710, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.479242] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 35a98028-0fc6-4e13-b50d-5dacf205dbe5] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1592.515194] env[62510]: DEBUG oslo_concurrency.lockutils [req-ed9992bf-40dc-42f4-82b1-c7c2b58ce3be req-d551868a-215f-4a30-a63e-6930aee876c0 service nova] Releasing lock "refresh_cache-bd21dd81-c0d9-4ff1-9183-0b4622dc5afb" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1592.601961] env[62510]: DEBUG oslo_vmware.api [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1768709, 'name': CloneVM_Task, 'duration_secs': 1.680456} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1592.602285] env[62510]: INFO nova.virt.vmwareapi.vmops [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Created linked-clone VM from snapshot [ 1592.603056] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4caebae-8ab0-4dc8-a798-46e427b08738 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.611922] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Uploading image b54fc7f8-0408-41bd-abcd-6c673fa40237 {{(pid=62510) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1592.629566] env[62510]: DEBUG oslo_concurrency.lockutils [req-2c2d4ff1-66c6-45ef-98b6-38d20dd11478 req-743f7339-8841-458d-b7b0-51fabf02841c service nova] Releasing lock "refresh_cache-e3850272-9dae-4164-8f0e-f5513af23f49" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1592.630104] env[62510]: DEBUG nova.compute.manager [req-2c2d4ff1-66c6-45ef-98b6-38d20dd11478 req-743f7339-8841-458d-b7b0-51fabf02841c service nova] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Received event network-changed-b93d3484-b909-4060-aef6-1f45f91f2325 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1592.630295] env[62510]: DEBUG nova.compute.manager [req-2c2d4ff1-66c6-45ef-98b6-38d20dd11478 req-743f7339-8841-458d-b7b0-51fabf02841c service nova] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Refreshing instance network info cache due to event network-changed-b93d3484-b909-4060-aef6-1f45f91f2325. 
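The event-handler entries in this stretch (Received event network-changed-..., then Acquiring/Acquired/Releasing lock "refresh_cache-<uuid>") show how cache refreshes triggered by Neutron notifications are serialized: one per-instance lock guards the rebuild of that instance's network info cache. Below is a minimal sketch of that locking pattern; refresh_from_neutron is a hypothetical callable, while lockutils.lock is the real oslo.concurrency context manager.

from oslo_concurrency import lockutils

def handle_network_changed(instance_uuid, port_id, refresh_from_neutron):
    # refresh_from_neutron is a hypothetical stand-in for the Neutron query
    # plus cache update performed by nova.network.neutron.
    lock_name = "refresh_cache-%s" % instance_uuid  # matches the lock names in the log
    with lockutils.lock(lock_name):
        # Only one greenthread rebuilds this instance's cache at a time.
        return refresh_from_neutron(instance_uuid, port_id)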
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1592.630518] env[62510]: DEBUG oslo_concurrency.lockutils [req-2c2d4ff1-66c6-45ef-98b6-38d20dd11478 req-743f7339-8841-458d-b7b0-51fabf02841c service nova] Acquiring lock "refresh_cache-b004fba7-13e0-40f0-827d-8d09b7717176" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1592.630663] env[62510]: DEBUG oslo_concurrency.lockutils [req-2c2d4ff1-66c6-45ef-98b6-38d20dd11478 req-743f7339-8841-458d-b7b0-51fabf02841c service nova] Acquired lock "refresh_cache-b004fba7-13e0-40f0-827d-8d09b7717176" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1592.630825] env[62510]: DEBUG nova.network.neutron [req-2c2d4ff1-66c6-45ef-98b6-38d20dd11478 req-743f7339-8841-458d-b7b0-51fabf02841c service nova] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Refreshing network info cache for port b93d3484-b909-4060-aef6-1f45f91f2325 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1592.645519] env[62510]: DEBUG oslo_vmware.rw_handles [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1592.645519] env[62510]: value = "vm-367334" [ 1592.645519] env[62510]: _type = "VirtualMachine" [ 1592.645519] env[62510]: }. {{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1592.645836] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-d2385cf9-c4de-4085-85e6-4ad6af0cdf19 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.655963] env[62510]: DEBUG oslo_vmware.rw_handles [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lease: (returnval){ [ 1592.655963] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]529489c7-f9af-5367-ed39-d2fb49a92fbf" [ 1592.655963] env[62510]: _type = "HttpNfcLease" [ 1592.655963] env[62510]: } obtained for exporting VM: (result){ [ 1592.655963] env[62510]: value = "vm-367334" [ 1592.655963] env[62510]: _type = "VirtualMachine" [ 1592.655963] env[62510]: }. {{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1592.656304] env[62510]: DEBUG oslo_vmware.api [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the lease: (returnval){ [ 1592.656304] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]529489c7-f9af-5367-ed39-d2fb49a92fbf" [ 1592.656304] env[62510]: _type = "HttpNfcLease" [ 1592.656304] env[62510]: } to be ready. {{(pid=62510) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1592.664317] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1592.664317] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]529489c7-f9af-5367-ed39-d2fb49a92fbf" [ 1592.664317] env[62510]: _type = "HttpNfcLease" [ 1592.664317] env[62510]: } is initializing. 
{{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1592.667288] env[62510]: DEBUG nova.compute.manager [None req-3b9a889c-1523-4f8a-954c-799bb67b2b97 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1592.668206] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3b9a889c-1523-4f8a-954c-799bb67b2b97 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1592.668528] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-763d1e92-a43a-43f3-9356-3945aa8a7d13 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.676969] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b9a889c-1523-4f8a-954c-799bb67b2b97 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1592.677256] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-32803290-89d4-405e-bae2-fcee8e732b90 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.681298] env[62510]: DEBUG nova.compute.manager [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1592.685407] env[62510]: DEBUG oslo_vmware.api [None req-3b9a889c-1523-4f8a-954c-799bb67b2b97 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Waiting for the task: (returnval){ [ 1592.685407] env[62510]: value = "task-1768712" [ 1592.685407] env[62510]: _type = "Task" [ 1592.685407] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1592.695026] env[62510]: DEBUG oslo_vmware.api [None req-3b9a889c-1523-4f8a-954c-799bb67b2b97 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768712, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.704817] env[62510]: DEBUG nova.virt.hardware [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1592.705147] env[62510]: DEBUG nova.virt.hardware [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1592.705313] env[62510]: DEBUG nova.virt.hardware [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1592.705530] env[62510]: DEBUG nova.virt.hardware [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1592.705687] env[62510]: DEBUG nova.virt.hardware [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1592.705838] env[62510]: DEBUG nova.virt.hardware [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1592.706078] env[62510]: DEBUG nova.virt.hardware [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1592.706597] env[62510]: DEBUG nova.virt.hardware [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1592.706597] env[62510]: DEBUG 
nova.virt.hardware [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1592.706753] env[62510]: DEBUG nova.virt.hardware [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1592.706813] env[62510]: DEBUG nova.virt.hardware [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1592.707718] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca78ad57-2ecd-4adf-b02c-d59e0bbb1522 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.718107] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cab87eb9-a4dd-46c9-a9a7-dbdbc2e5a4d0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.735288] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] Instance VIF info [] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1592.741443] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Creating folder: Project (1e4a0ea37e7d4ee987eecf03ff66067a). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1592.741874] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dd29bd92-7fd3-408d-88e0-3bae29c977e0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.748213] env[62510]: DEBUG nova.network.neutron [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1592.755235] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Created folder: Project (1e4a0ea37e7d4ee987eecf03ff66067a) in parent group-v367197. [ 1592.756376] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Creating folder: Instances. Parent ref: group-v367335. 
{{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1592.756376] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-87c88875-323e-49ff-b0ed-7a10e9401b33 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.768502] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Created folder: Instances in parent group-v367335. [ 1592.769240] env[62510]: DEBUG oslo.service.loopingcall [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1592.769553] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1592.769944] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e499e546-cccd-47d7-9c8b-e0e4dee41f0d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.787364] env[62510]: DEBUG nova.scheduler.client.report [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1592.801982] env[62510]: DEBUG oslo_vmware.api [None req-dc36641a-cd6d-4db0-810b-e1128fe0d78f tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': task-1768710, 'name': PowerOffVM_Task, 'duration_secs': 0.382659} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1592.804932] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc36641a-cd6d-4db0-810b-e1128fe0d78f tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1592.805293] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-dc36641a-cd6d-4db0-810b-e1128fe0d78f tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1592.809587] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1592.809587] env[62510]: value = "task-1768715" [ 1592.809587] env[62510]: _type = "Task" [ 1592.809587] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1592.810028] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0eab8c32-1145-4339-bdb1-ba27f152286f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.827320] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768715, 'name': CreateVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.918350] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-dc36641a-cd6d-4db0-810b-e1128fe0d78f tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1592.918762] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-dc36641a-cd6d-4db0-810b-e1128fe0d78f tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1592.919029] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc36641a-cd6d-4db0-810b-e1128fe0d78f tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Deleting the datastore file [datastore1] bd21dd81-c0d9-4ff1-9183-0b4622dc5afb {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1592.919371] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b8ac7684-fe1d-4f97-a828-7aee3b6756f0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.928970] env[62510]: DEBUG oslo_vmware.api [None req-dc36641a-cd6d-4db0-810b-e1128fe0d78f tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Waiting for the task: (returnval){ [ 1592.928970] env[62510]: value = "task-1768717" [ 1592.928970] env[62510]: _type = "Task" [ 1592.928970] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1592.948988] env[62510]: DEBUG oslo_vmware.api [None req-dc36641a-cd6d-4db0-810b-e1128fe0d78f tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': task-1768717, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.982084] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 75e06a24-b96c-4a42-bc2d-b0b960e3301a] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1593.167188] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1593.167188] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]529489c7-f9af-5367-ed39-d2fb49a92fbf" [ 1593.167188] env[62510]: _type = "HttpNfcLease" [ 1593.167188] env[62510]: } is ready. {{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1593.167494] env[62510]: DEBUG oslo_vmware.rw_handles [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1593.167494] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]529489c7-f9af-5367-ed39-d2fb49a92fbf" [ 1593.167494] env[62510]: _type = "HttpNfcLease" [ 1593.167494] env[62510]: }. {{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1593.168297] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-766a1ac8-6cc5-4a87-97ae-8a04e8b37ed2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.178444] env[62510]: DEBUG oslo_vmware.rw_handles [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c3c82f-e51f-d35c-ffae-9074e967168c/disk-0.vmdk from lease info. {{(pid=62510) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1593.178841] env[62510]: DEBUG oslo_vmware.rw_handles [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c3c82f-e51f-d35c-ffae-9074e967168c/disk-0.vmdk for reading. 
{{(pid=62510) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1593.254137] env[62510]: DEBUG nova.network.neutron [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Updating instance_info_cache with network_info: [{"id": "9a53a8d4-8b7d-4167-b888-f20b2fce23c5", "address": "fa:16:3e:17:66:3e", "network": {"id": "00c0e43c-22e9-4b57-9337-f13ecd10f244", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1255004128-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98d6c12eccf74757b3cbc2c8acddeb19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27abaf31-0f39-428c-a8d3-cd7548de6818", "external-id": "nsx-vlan-transportzone-505", "segmentation_id": 505, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a53a8d4-8b", "ovs_interfaceid": "9a53a8d4-8b7d-4167-b888-f20b2fce23c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1593.268934] env[62510]: DEBUG oslo_vmware.api [None req-3b9a889c-1523-4f8a-954c-799bb67b2b97 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768712, 'name': PowerOffVM_Task, 'duration_secs': 0.25014} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.269280] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b9a889c-1523-4f8a-954c-799bb67b2b97 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1593.269461] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3b9a889c-1523-4f8a-954c-799bb67b2b97 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1593.269743] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-980ec630-81a4-47f3-91ef-2b1703329ea8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.293373] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.644s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1593.294024] env[62510]: DEBUG nova.compute.manager [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1593.299748] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f977f474-7ccc-47e5-89d9-80d671fd073b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.304254] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.344s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1593.304536] env[62510]: INFO nova.compute.claims [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1593.313281] env[62510]: DEBUG oslo_concurrency.lockutils [None req-daaf4048-a186-46d5-8731-1089c883ed22 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Acquiring lock "612e95d6-28ef-4c9a-b5d9-fd83122bfa44" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1593.313632] env[62510]: DEBUG oslo_concurrency.lockutils [None req-daaf4048-a186-46d5-8731-1089c883ed22 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Lock "612e95d6-28ef-4c9a-b5d9-fd83122bfa44" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1593.313847] env[62510]: DEBUG oslo_concurrency.lockutils [None req-daaf4048-a186-46d5-8731-1089c883ed22 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Acquiring lock "612e95d6-28ef-4c9a-b5d9-fd83122bfa44-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1593.314040] env[62510]: DEBUG oslo_concurrency.lockutils [None req-daaf4048-a186-46d5-8731-1089c883ed22 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Lock "612e95d6-28ef-4c9a-b5d9-fd83122bfa44-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1593.316550] env[62510]: DEBUG oslo_concurrency.lockutils [None req-daaf4048-a186-46d5-8731-1089c883ed22 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Lock "612e95d6-28ef-4c9a-b5d9-fd83122bfa44-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1593.320289] env[62510]: INFO nova.compute.manager [None req-daaf4048-a186-46d5-8731-1089c883ed22 tempest-ServersTestManualDisk-859263968 
tempest-ServersTestManualDisk-859263968-project-member] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Terminating instance [ 1593.330126] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768715, 'name': CreateVM_Task, 'duration_secs': 0.435074} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.333075] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1593.333075] env[62510]: DEBUG oslo_concurrency.lockutils [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1593.333075] env[62510]: DEBUG oslo_concurrency.lockutils [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1593.333075] env[62510]: DEBUG oslo_concurrency.lockutils [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1593.333075] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1da8a992-12a8-45ec-b5df-e83dee2001d4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.340025] env[62510]: DEBUG oslo_vmware.api [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Waiting for the task: (returnval){ [ 1593.340025] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52dcfd63-5c26-fedb-c0a9-464ba96f25e1" [ 1593.340025] env[62510]: _type = "Task" [ 1593.340025] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.356241] env[62510]: DEBUG oslo_vmware.api [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52dcfd63-5c26-fedb-c0a9-464ba96f25e1, 'name': SearchDatastore_Task, 'duration_secs': 0.01116} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.361458] env[62510]: DEBUG oslo_concurrency.lockutils [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1593.361647] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1593.361963] env[62510]: DEBUG oslo_concurrency.lockutils [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1593.362082] env[62510]: DEBUG oslo_concurrency.lockutils [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1593.362253] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1593.363667] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2cb452e9-2a44-47a8-8f41-713fdfa154ba {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.376074] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1593.376278] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1593.377436] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c6009ef-d841-4aa1-bbd9-a96186848da1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.384382] env[62510]: DEBUG oslo_vmware.api [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Waiting for the task: (returnval){ [ 1593.384382] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52805600-661a-168f-103d-b66bdde4ed4f" [ 1593.384382] env[62510]: _type = "Task" [ 1593.384382] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.396438] env[62510]: DEBUG oslo_vmware.api [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52805600-661a-168f-103d-b66bdde4ed4f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.400800] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3b9a889c-1523-4f8a-954c-799bb67b2b97 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1593.400800] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3b9a889c-1523-4f8a-954c-799bb67b2b97 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1593.400800] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b9a889c-1523-4f8a-954c-799bb67b2b97 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Deleting the datastore file [datastore1] b7c2c768-573b-4c1c-ade7-45fb87b95d41 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1593.400800] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ff02880a-cd91-4716-b42c-abc34cf9c027 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.407205] env[62510]: DEBUG oslo_vmware.api [None req-3b9a889c-1523-4f8a-954c-799bb67b2b97 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Waiting for the task: (returnval){ [ 1593.407205] env[62510]: value = "task-1768719" [ 1593.407205] env[62510]: _type = "Task" [ 1593.407205] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.416730] env[62510]: DEBUG oslo_vmware.api [None req-3b9a889c-1523-4f8a-954c-799bb67b2b97 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768719, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.449406] env[62510]: DEBUG oslo_vmware.api [None req-dc36641a-cd6d-4db0-810b-e1128fe0d78f tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': task-1768717, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.194098} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.449406] env[62510]: DEBUG nova.compute.manager [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Stashing vm_state: active {{(pid=62510) _prep_resize /opt/stack/nova/nova/compute/manager.py:5998}} [ 1593.453038] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc36641a-cd6d-4db0-810b-e1128fe0d78f tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1593.453038] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-dc36641a-cd6d-4db0-810b-e1128fe0d78f tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1593.453038] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-dc36641a-cd6d-4db0-810b-e1128fe0d78f tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1593.453038] env[62510]: INFO nova.compute.manager [None req-dc36641a-cd6d-4db0-810b-e1128fe0d78f tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1593.453038] env[62510]: DEBUG oslo.service.loopingcall [None req-dc36641a-cd6d-4db0-810b-e1128fe0d78f tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1593.453038] env[62510]: DEBUG nova.compute.manager [-] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1593.453038] env[62510]: DEBUG nova.network.neutron [-] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1593.486905] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: c7d875ee-2b9c-48e4-9bf9-f7602e75ec62] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1593.584652] env[62510]: DEBUG nova.network.neutron [req-2c2d4ff1-66c6-45ef-98b6-38d20dd11478 req-743f7339-8841-458d-b7b0-51fabf02841c service nova] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Updated VIF entry in instance network info cache for port b93d3484-b909-4060-aef6-1f45f91f2325. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1593.585109] env[62510]: DEBUG nova.network.neutron [req-2c2d4ff1-66c6-45ef-98b6-38d20dd11478 req-743f7339-8841-458d-b7b0-51fabf02841c service nova] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Updating instance_info_cache with network_info: [{"id": "b93d3484-b909-4060-aef6-1f45f91f2325", "address": "fa:16:3e:db:60:53", "network": {"id": "457a7f06-5f1c-485e-8589-43d0e40d3fc5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2119303260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.164", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c995c2427bd4f7da644d0a8df7d69da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2153f70-3d14-42ab-8bb3-be78296dd3b8", "external-id": "nsx-vlan-transportzone-532", "segmentation_id": 532, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb93d3484-b9", "ovs_interfaceid": "b93d3484-b909-4060-aef6-1f45f91f2325", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1593.763587] env[62510]: DEBUG oslo_concurrency.lockutils [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Releasing lock "refresh_cache-0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1593.764167] env[62510]: DEBUG nova.compute.manager [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Instance network_info: |[{"id": 
"9a53a8d4-8b7d-4167-b888-f20b2fce23c5", "address": "fa:16:3e:17:66:3e", "network": {"id": "00c0e43c-22e9-4b57-9337-f13ecd10f244", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1255004128-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98d6c12eccf74757b3cbc2c8acddeb19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27abaf31-0f39-428c-a8d3-cd7548de6818", "external-id": "nsx-vlan-transportzone-505", "segmentation_id": 505, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a53a8d4-8b", "ovs_interfaceid": "9a53a8d4-8b7d-4167-b888-f20b2fce23c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1593.764567] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:66:3e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '27abaf31-0f39-428c-a8d3-cd7548de6818', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9a53a8d4-8b7d-4167-b888-f20b2fce23c5', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1593.777294] env[62510]: DEBUG oslo.service.loopingcall [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1593.778209] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1593.778209] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0adb1c2f-847e-4474-a239-dec09160f2d9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.812519] env[62510]: DEBUG nova.compute.utils [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1593.819286] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1593.819286] env[62510]: value = "task-1768720" [ 1593.819286] env[62510]: _type = "Task" [ 1593.819286] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.820031] env[62510]: DEBUG nova.compute.manager [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1593.820580] env[62510]: DEBUG nova.network.neutron [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1593.828379] env[62510]: DEBUG nova.compute.manager [None req-daaf4048-a186-46d5-8731-1089c883ed22 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1593.828379] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-daaf4048-a186-46d5-8731-1089c883ed22 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1593.832847] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5547d027-346e-4b5e-8c6a-0a7638b5ae7d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.845728] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768720, 'name': CreateVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.848167] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-daaf4048-a186-46d5-8731-1089c883ed22 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1593.848540] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-514b0cb0-0b44-4661-960a-d1c3fb004e24 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.858202] env[62510]: DEBUG oslo_vmware.api [None req-daaf4048-a186-46d5-8731-1089c883ed22 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Waiting for the task: (returnval){ [ 1593.858202] env[62510]: value = "task-1768721" [ 1593.858202] env[62510]: _type = "Task" [ 1593.858202] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.868459] env[62510]: DEBUG oslo_vmware.api [None req-daaf4048-a186-46d5-8731-1089c883ed22 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Task: {'id': task-1768721, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.897825] env[62510]: DEBUG oslo_vmware.api [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52805600-661a-168f-103d-b66bdde4ed4f, 'name': SearchDatastore_Task, 'duration_secs': 0.011464} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.898820] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce1398f8-95f7-4ff9-9e75-6cee8e4e5363 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.907497] env[62510]: DEBUG oslo_vmware.api [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Waiting for the task: (returnval){ [ 1593.907497] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52cb753d-e15f-0a4e-5ab1-e54905f91917" [ 1593.907497] env[62510]: _type = "Task" [ 1593.907497] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.926430] env[62510]: DEBUG oslo_vmware.api [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52cb753d-e15f-0a4e-5ab1-e54905f91917, 'name': SearchDatastore_Task, 'duration_secs': 0.013514} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.931788] env[62510]: DEBUG oslo_concurrency.lockutils [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1593.932335] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] fa43a538-1aae-4642-8370-70f2a49ca92c/fa43a538-1aae-4642-8370-70f2a49ca92c.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1593.933123] env[62510]: DEBUG oslo_vmware.api [None req-3b9a889c-1523-4f8a-954c-799bb67b2b97 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Task: {'id': task-1768719, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137351} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.933549] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a7e65f29-191b-4697-ab2f-7631fb03d77f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.937613] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b9a889c-1523-4f8a-954c-799bb67b2b97 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1593.938172] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3b9a889c-1523-4f8a-954c-799bb67b2b97 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1593.938818] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3b9a889c-1523-4f8a-954c-799bb67b2b97 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1593.939292] env[62510]: INFO nova.compute.manager [None req-3b9a889c-1523-4f8a-954c-799bb67b2b97 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Took 1.27 seconds to destroy the instance on the hypervisor. [ 1593.939884] env[62510]: DEBUG oslo.service.loopingcall [None req-3b9a889c-1523-4f8a-954c-799bb67b2b97 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1593.940600] env[62510]: DEBUG nova.compute.manager [-] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1593.940941] env[62510]: DEBUG nova.network.neutron [-] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1593.951937] env[62510]: DEBUG oslo_vmware.api [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Waiting for the task: (returnval){ [ 1593.951937] env[62510]: value = "task-1768722" [ 1593.951937] env[62510]: _type = "Task" [ 1593.951937] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.960857] env[62510]: DEBUG nova.policy [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5aefdf51c02740b9ae15e49254b8c4f9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cd2a30849f1f4574a890619b3fff7010', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1593.969072] env[62510]: DEBUG oslo_vmware.api [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Task: {'id': task-1768722, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.978296] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1593.989778] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 585784c5-b56a-435d-8b22-53bc5cb39b25] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1594.060847] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0da0e7d6-938e-48c2-98c4-43f87173c67c tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Acquiring lock "9d5d29ea-be92-4881-9fc8-fea3f2f442d0" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1594.061351] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0da0e7d6-938e-48c2-98c4-43f87173c67c tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Lock "9d5d29ea-be92-4881-9fc8-fea3f2f442d0" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1594.062152] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0da0e7d6-938e-48c2-98c4-43f87173c67c tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Acquiring lock "9d5d29ea-be92-4881-9fc8-fea3f2f442d0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1594.062461] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0da0e7d6-938e-48c2-98c4-43f87173c67c tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Lock "9d5d29ea-be92-4881-9fc8-fea3f2f442d0-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1594.062730] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0da0e7d6-938e-48c2-98c4-43f87173c67c tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Lock "9d5d29ea-be92-4881-9fc8-fea3f2f442d0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1594.065929] env[62510]: INFO nova.compute.manager [None req-0da0e7d6-938e-48c2-98c4-43f87173c67c tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Terminating instance [ 1594.090530] env[62510]: DEBUG oslo_concurrency.lockutils [req-2c2d4ff1-66c6-45ef-98b6-38d20dd11478 req-743f7339-8841-458d-b7b0-51fabf02841c service nova] Releasing lock "refresh_cache-b004fba7-13e0-40f0-827d-8d09b7717176" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1594.091793] env[62510]: DEBUG nova.compute.manager [req-2c2d4ff1-66c6-45ef-98b6-38d20dd11478 req-743f7339-8841-458d-b7b0-51fabf02841c service nova] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Received event network-changed-766401c7-3f55-48f7-a695-d2db7a829ade {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1594.092042] env[62510]: DEBUG nova.compute.manager [req-2c2d4ff1-66c6-45ef-98b6-38d20dd11478 req-743f7339-8841-458d-b7b0-51fabf02841c service nova] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Refreshing instance network info cache due to event network-changed-766401c7-3f55-48f7-a695-d2db7a829ade. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1594.092679] env[62510]: DEBUG oslo_concurrency.lockutils [req-2c2d4ff1-66c6-45ef-98b6-38d20dd11478 req-743f7339-8841-458d-b7b0-51fabf02841c service nova] Acquiring lock "refresh_cache-bd21dd81-c0d9-4ff1-9183-0b4622dc5afb" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1594.092679] env[62510]: DEBUG oslo_concurrency.lockutils [req-2c2d4ff1-66c6-45ef-98b6-38d20dd11478 req-743f7339-8841-458d-b7b0-51fabf02841c service nova] Acquired lock "refresh_cache-bd21dd81-c0d9-4ff1-9183-0b4622dc5afb" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1594.092814] env[62510]: DEBUG nova.network.neutron [req-2c2d4ff1-66c6-45ef-98b6-38d20dd11478 req-743f7339-8841-458d-b7b0-51fabf02841c service nova] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Refreshing network info cache for port 766401c7-3f55-48f7-a695-d2db7a829ade {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1594.328064] env[62510]: DEBUG nova.compute.manager [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1594.351953] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768720, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.373575] env[62510]: DEBUG oslo_vmware.api [None req-daaf4048-a186-46d5-8731-1089c883ed22 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Task: {'id': task-1768721, 'name': PowerOffVM_Task, 'duration_secs': 0.241926} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1594.374092] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-daaf4048-a186-46d5-8731-1089c883ed22 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1594.374092] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-daaf4048-a186-46d5-8731-1089c883ed22 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1594.374590] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-588af898-82c8-4bba-98e1-efbb06541b9f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.473940] env[62510]: DEBUG oslo_vmware.api [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Task: {'id': task-1768722, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.495725] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 731e7110-9709-4c4e-96d2-00e21e67c6e3] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1594.562656] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-daaf4048-a186-46d5-8731-1089c883ed22 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1594.562932] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-daaf4048-a186-46d5-8731-1089c883ed22 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1594.564128] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-daaf4048-a186-46d5-8731-1089c883ed22 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Deleting the datastore file [datastore1] 612e95d6-28ef-4c9a-b5d9-fd83122bfa44 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1594.569427] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-50a3a049-7066-47e3-9169-c6be3d863168 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.574861] env[62510]: DEBUG nova.compute.manager 
[None req-0da0e7d6-938e-48c2-98c4-43f87173c67c tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1594.575144] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0da0e7d6-938e-48c2-98c4-43f87173c67c tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1594.576693] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c4320a7-9a33-458a-bb4c-5890f45c369b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.590954] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-0da0e7d6-938e-48c2-98c4-43f87173c67c tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1594.597373] env[62510]: DEBUG nova.network.neutron [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Successfully created port: 30661ae8-22f8-4f9e-91d9-67d7a31e134c {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1594.601990] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7cf44f08-df80-4d47-8a60-00b14922da6e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.608719] env[62510]: DEBUG oslo_vmware.api [None req-daaf4048-a186-46d5-8731-1089c883ed22 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Waiting for the task: (returnval){ [ 1594.608719] env[62510]: value = "task-1768724" [ 1594.608719] env[62510]: _type = "Task" [ 1594.608719] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.616332] env[62510]: DEBUG oslo_vmware.api [None req-0da0e7d6-938e-48c2-98c4-43f87173c67c tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Waiting for the task: (returnval){ [ 1594.616332] env[62510]: value = "task-1768725" [ 1594.616332] env[62510]: _type = "Task" [ 1594.616332] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.623945] env[62510]: DEBUG oslo_vmware.api [None req-daaf4048-a186-46d5-8731-1089c883ed22 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Task: {'id': task-1768724, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.637030] env[62510]: DEBUG oslo_vmware.api [None req-0da0e7d6-938e-48c2-98c4-43f87173c67c tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Task: {'id': task-1768725, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.645622] env[62510]: INFO nova.network.neutron [req-2c2d4ff1-66c6-45ef-98b6-38d20dd11478 req-743f7339-8841-458d-b7b0-51fabf02841c service nova] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Port 766401c7-3f55-48f7-a695-d2db7a829ade from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1594.645903] env[62510]: DEBUG nova.network.neutron [req-2c2d4ff1-66c6-45ef-98b6-38d20dd11478 req-743f7339-8841-458d-b7b0-51fabf02841c service nova] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1594.778733] env[62510]: DEBUG nova.network.neutron [-] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1594.853439] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768720, 'name': CreateVM_Task, 'duration_secs': 0.727396} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1594.853439] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1594.854159] env[62510]: DEBUG oslo_concurrency.lockutils [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1594.854425] env[62510]: DEBUG oslo_concurrency.lockutils [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1594.854859] env[62510]: DEBUG oslo_concurrency.lockutils [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1594.855156] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ecba2d5a-c089-4327-bfab-3c59f30aef4d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.861957] env[62510]: DEBUG oslo_vmware.api [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Waiting for the task: (returnval){ [ 1594.861957] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]526f49e1-f7fa-3991-6170-d95e7613ffdb" [ 1594.861957] env[62510]: _type = "Task" [ 1594.861957] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.878163] env[62510]: DEBUG oslo_vmware.api [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]526f49e1-f7fa-3991-6170-d95e7613ffdb, 'name': SearchDatastore_Task, 'duration_secs': 0.01074} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1594.878709] env[62510]: DEBUG oslo_concurrency.lockutils [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1594.879172] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1594.879839] env[62510]: DEBUG oslo_concurrency.lockutils [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1594.879839] env[62510]: DEBUG oslo_concurrency.lockutils [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1594.879839] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1594.880232] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7815129b-a881-41a6-9cd9-b994edbde4ab {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.895050] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1594.895370] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 
tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1594.897146] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0410ced5-9a0c-49b4-81cc-826c83d6b1f8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.907544] env[62510]: DEBUG oslo_vmware.api [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Waiting for the task: (returnval){ [ 1594.907544] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]523bb9d2-89ba-09df-9861-81eeca59e80f" [ 1594.907544] env[62510]: _type = "Task" [ 1594.907544] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.917490] env[62510]: DEBUG oslo_vmware.api [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]523bb9d2-89ba-09df-9861-81eeca59e80f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.964262] env[62510]: DEBUG oslo_vmware.api [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Task: {'id': task-1768722, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.673797} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1594.967388] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] fa43a538-1aae-4642-8370-70f2a49ca92c/fa43a538-1aae-4642-8370-70f2a49ca92c.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1594.967650] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1594.968394] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b28ce583-87b9-4328-af83-34f8adb4e097 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.978011] env[62510]: DEBUG oslo_vmware.api [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Waiting for the task: (returnval){ [ 1594.978011] env[62510]: value = "task-1768726" [ 1594.978011] env[62510]: _type = "Task" [ 1594.978011] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.989760] env[62510]: DEBUG oslo_vmware.api [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Task: {'id': task-1768726, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.999657] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: eb840df4-edc1-44cb-84c9-f31b7b56b6bd] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1595.078970] env[62510]: DEBUG nova.compute.manager [req-81129d09-1832-4cf6-8aba-17425df05f07 req-2b495635-db61-48f0-b34a-8c7c7d49cdb2 service nova] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Received event network-vif-plugged-9a53a8d4-8b7d-4167-b888-f20b2fce23c5 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1595.078970] env[62510]: DEBUG oslo_concurrency.lockutils [req-81129d09-1832-4cf6-8aba-17425df05f07 req-2b495635-db61-48f0-b34a-8c7c7d49cdb2 service nova] Acquiring lock "0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1595.079227] env[62510]: DEBUG oslo_concurrency.lockutils [req-81129d09-1832-4cf6-8aba-17425df05f07 req-2b495635-db61-48f0-b34a-8c7c7d49cdb2 service nova] Lock "0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1595.079420] env[62510]: DEBUG oslo_concurrency.lockutils [req-81129d09-1832-4cf6-8aba-17425df05f07 req-2b495635-db61-48f0-b34a-8c7c7d49cdb2 service nova] Lock "0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1595.079589] env[62510]: DEBUG nova.compute.manager [req-81129d09-1832-4cf6-8aba-17425df05f07 req-2b495635-db61-48f0-b34a-8c7c7d49cdb2 service nova] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] No waiting events found dispatching network-vif-plugged-9a53a8d4-8b7d-4167-b888-f20b2fce23c5 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1595.079820] env[62510]: WARNING nova.compute.manager [req-81129d09-1832-4cf6-8aba-17425df05f07 req-2b495635-db61-48f0-b34a-8c7c7d49cdb2 service nova] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Received unexpected event network-vif-plugged-9a53a8d4-8b7d-4167-b888-f20b2fce23c5 for instance with vm_state building and task_state spawning. 
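The three oslo_concurrency.lockutils entries above (acquiring, acquired, "released" around the per-instance "-events" lock) come from the lockutils.synchronized decorator, which logs exactly this acquire/wait/hold pattern. A minimal, hypothetical sketch of that mechanism follows; the helper names (pop_instance_event, instance_uuid) are illustrative stand-ins and not Nova's actual code, although the "<uuid>-events" lock name and the nested _pop_event function mirror what the log records.

    from oslo_concurrency import lockutils

    def pop_instance_event(instance_uuid, event_name):
        # lockutils.synchronized wraps the nested function and emits the
        # "Acquiring lock ... by ...", "Lock ... acquired ... :: waited Ns"
        # and "Lock ... "released" ... :: held Ns" DEBUG lines seen above.
        @lockutils.synchronized('%s-events' % instance_uuid)
        def _pop_event():
            # Look up a waiter registered for this event; returning None
            # corresponds to the "No waiting events found dispatching ..."
            # message and the subsequent "Received unexpected event" warning.
            return None

        return _pop_event()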
[ 1595.079898] env[62510]: DEBUG nova.compute.manager [req-81129d09-1832-4cf6-8aba-17425df05f07 req-2b495635-db61-48f0-b34a-8c7c7d49cdb2 service nova] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Received event network-changed-9a53a8d4-8b7d-4167-b888-f20b2fce23c5 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1595.080064] env[62510]: DEBUG nova.compute.manager [req-81129d09-1832-4cf6-8aba-17425df05f07 req-2b495635-db61-48f0-b34a-8c7c7d49cdb2 service nova] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Refreshing instance network info cache due to event network-changed-9a53a8d4-8b7d-4167-b888-f20b2fce23c5. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1595.080250] env[62510]: DEBUG oslo_concurrency.lockutils [req-81129d09-1832-4cf6-8aba-17425df05f07 req-2b495635-db61-48f0-b34a-8c7c7d49cdb2 service nova] Acquiring lock "refresh_cache-0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1595.080381] env[62510]: DEBUG oslo_concurrency.lockutils [req-81129d09-1832-4cf6-8aba-17425df05f07 req-2b495635-db61-48f0-b34a-8c7c7d49cdb2 service nova] Acquired lock "refresh_cache-0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1595.080534] env[62510]: DEBUG nova.network.neutron [req-81129d09-1832-4cf6-8aba-17425df05f07 req-2b495635-db61-48f0-b34a-8c7c7d49cdb2 service nova] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Refreshing network info cache for port 9a53a8d4-8b7d-4167-b888-f20b2fce23c5 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1595.089884] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e43569e-1e2e-46b5-a5b1-5ffc409c4ffd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.100409] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46a7fc5b-e909-4993-9dfb-94726cd5b733 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.146196] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da1e7444-3c3f-499f-8716-a5137bef646d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.149211] env[62510]: DEBUG oslo_concurrency.lockutils [req-2c2d4ff1-66c6-45ef-98b6-38d20dd11478 req-743f7339-8841-458d-b7b0-51fabf02841c service nova] Releasing lock "refresh_cache-bd21dd81-c0d9-4ff1-9183-0b4622dc5afb" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1595.162936] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76feac52-982c-4993-9dc3-1094239206db {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.167596] env[62510]: DEBUG oslo_vmware.api [None req-0da0e7d6-938e-48c2-98c4-43f87173c67c tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Task: {'id': task-1768725, 'name': PowerOffVM_Task, 'duration_secs': 0.247338} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.168241] env[62510]: DEBUG oslo_vmware.api [None req-daaf4048-a186-46d5-8731-1089c883ed22 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Task: {'id': task-1768724, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.201572} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.168241] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-0da0e7d6-938e-48c2-98c4-43f87173c67c tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1595.168466] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0da0e7d6-938e-48c2-98c4-43f87173c67c tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1595.168759] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-daaf4048-a186-46d5-8731-1089c883ed22 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1595.168967] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-daaf4048-a186-46d5-8731-1089c883ed22 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1595.169205] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-daaf4048-a186-46d5-8731-1089c883ed22 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1595.169574] env[62510]: INFO nova.compute.manager [None req-daaf4048-a186-46d5-8731-1089c883ed22 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Took 1.34 seconds to destroy the instance on the hypervisor. [ 1595.169804] env[62510]: DEBUG oslo.service.loopingcall [None req-daaf4048-a186-46d5-8731-1089c883ed22 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1595.170642] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-203fed93-ff96-4ec8-a623-1bce7e8c67c1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.172996] env[62510]: DEBUG nova.compute.manager [-] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1595.173140] env[62510]: DEBUG nova.network.neutron [-] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1595.184335] env[62510]: DEBUG nova.compute.provider_tree [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1595.195702] env[62510]: DEBUG nova.network.neutron [-] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1595.287595] env[62510]: INFO nova.compute.manager [-] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Took 1.83 seconds to deallocate network for instance. 
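The ProviderTree inventory logged just above is what placement ultimately sizes new allocations against. As a rough, illustrative calculation (not Nova code), usable capacity per resource class follows placement's (total - reserved) * allocation_ratio rule, so the values shown in that entry work out as below.

    # Inventory values copied from the log entry above.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        # Usable capacity as placement computes it for scheduling decisions.
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0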
[ 1595.335971] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0da0e7d6-938e-48c2-98c4-43f87173c67c tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1595.335971] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0da0e7d6-938e-48c2-98c4-43f87173c67c tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1595.336226] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-0da0e7d6-938e-48c2-98c4-43f87173c67c tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Deleting the datastore file [datastore1] 9d5d29ea-be92-4881-9fc8-fea3f2f442d0 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1595.336363] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-115063be-0c58-42a9-b17a-0240cc34636b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.344955] env[62510]: DEBUG oslo_vmware.api [None req-0da0e7d6-938e-48c2-98c4-43f87173c67c tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Waiting for the task: (returnval){ [ 1595.344955] env[62510]: value = "task-1768728" [ 1595.344955] env[62510]: _type = "Task" [ 1595.344955] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1595.350320] env[62510]: DEBUG nova.compute.manager [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1595.359506] env[62510]: DEBUG oslo_vmware.api [None req-0da0e7d6-938e-48c2-98c4-43f87173c67c tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Task: {'id': task-1768728, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.379397] env[62510]: DEBUG nova.virt.hardware [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=<?>,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-12-11T19:20:21Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1595.379677] env[62510]: DEBUG nova.virt.hardware [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1595.379873] env[62510]: DEBUG nova.virt.hardware [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1595.380169] env[62510]: DEBUG nova.virt.hardware [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1595.380410] env[62510]: DEBUG nova.virt.hardware [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1595.380847] env[62510]: DEBUG nova.virt.hardware [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1595.380998] env[62510]: DEBUG nova.virt.hardware [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1595.381218] env[62510]: DEBUG nova.virt.hardware [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef 
tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1595.381411] env[62510]: DEBUG nova.virt.hardware [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1595.381681] env[62510]: DEBUG nova.virt.hardware [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1595.382079] env[62510]: DEBUG nova.virt.hardware [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1595.383304] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16a263b2-344d-4f44-b774-9fc3eea4551b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.393790] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b43181b4-b125-4d70-aef3-29b5eaf80a25 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.429332] env[62510]: DEBUG oslo_vmware.api [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]523bb9d2-89ba-09df-9861-81eeca59e80f, 'name': SearchDatastore_Task, 'duration_secs': 0.011448} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.430722] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f76930a2-99ba-468f-b1a2-6cafa63558e6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.438862] env[62510]: DEBUG oslo_vmware.api [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Waiting for the task: (returnval){ [ 1595.438862] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]520bfa61-5382-53ab-1d18-94566d951c1a" [ 1595.438862] env[62510]: _type = "Task" [ 1595.438862] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1595.454272] env[62510]: DEBUG oslo_vmware.api [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]520bfa61-5382-53ab-1d18-94566d951c1a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.489812] env[62510]: DEBUG oslo_vmware.api [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Task: {'id': task-1768726, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083642} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.490676] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1595.491108] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dae2fab3-1cc7-4b0f-b2d7-139aa543f8a7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.504901] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 3266d254-4a75-4fd3-b4e7-ebeb86467cbe] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1595.515968] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] Reconfiguring VM instance instance-00000035 to attach disk [datastore1] fa43a538-1aae-4642-8370-70f2a49ca92c/fa43a538-1aae-4642-8370-70f2a49ca92c.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1595.516686] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4ec24642-29e3-4dee-8cf9-313082cb9d12 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.541314] env[62510]: DEBUG oslo_vmware.api [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Waiting for the task: (returnval){ [ 1595.541314] env[62510]: value = "task-1768729" [ 1595.541314] env[62510]: _type = "Task" [ 1595.541314] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1595.553996] env[62510]: DEBUG oslo_vmware.api [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Task: {'id': task-1768729, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.697801] env[62510]: INFO nova.compute.manager [-] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Took 1.76 seconds to deallocate network for instance. [ 1595.715675] env[62510]: ERROR nova.scheduler.client.report [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] [req-0296ea57-7e74-4d29-b57a-6b65f6ba3b17] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c3653102-341b-4ed1-8b1f-1abaf8aa3e56. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-0296ea57-7e74-4d29-b57a-6b65f6ba3b17"}]} [ 1595.733329] env[62510]: DEBUG nova.scheduler.client.report [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Refreshing inventories for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1595.750476] env[62510]: DEBUG nova.scheduler.client.report [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Updating ProviderTree inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1595.750706] env[62510]: DEBUG nova.compute.provider_tree [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1595.769172] env[62510]: DEBUG nova.scheduler.client.report [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Refreshing aggregate associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, aggregates: None {{(pid=62510) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1595.790872] env[62510]: DEBUG nova.scheduler.client.report [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Refreshing trait associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1595.794891] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dc36641a-cd6d-4db0-810b-e1128fe0d78f tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1595.857491] env[62510]: DEBUG oslo_vmware.api [None req-0da0e7d6-938e-48c2-98c4-43f87173c67c tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Task: {'id': task-1768728, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.216376} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.860692] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-0da0e7d6-938e-48c2-98c4-43f87173c67c tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1595.861188] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0da0e7d6-938e-48c2-98c4-43f87173c67c tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1595.861440] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0da0e7d6-938e-48c2-98c4-43f87173c67c tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1595.861659] env[62510]: INFO nova.compute.manager [None req-0da0e7d6-938e-48c2-98c4-43f87173c67c tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Took 1.29 seconds to destroy the instance on the hypervisor. [ 1595.861953] env[62510]: DEBUG oslo.service.loopingcall [None req-0da0e7d6-938e-48c2-98c4-43f87173c67c tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1595.862420] env[62510]: DEBUG nova.compute.manager [-] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1595.862577] env[62510]: DEBUG nova.network.neutron [-] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1595.957178] env[62510]: DEBUG oslo_vmware.api [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]520bfa61-5382-53ab-1d18-94566d951c1a, 'name': SearchDatastore_Task, 'duration_secs': 0.013996} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.959095] env[62510]: DEBUG oslo_concurrency.lockutils [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1595.959766] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a/0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1595.960037] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-12de2c11-6ccf-4db4-9b86-fc39c0221ba3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.968750] env[62510]: DEBUG oslo_vmware.api [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Waiting for the task: (returnval){ [ 1595.968750] env[62510]: value = "task-1768730" [ 1595.968750] env[62510]: _type = "Task" [ 1595.968750] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1595.981320] env[62510]: DEBUG oslo_vmware.api [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': task-1768730, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.018076] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 5588650b-c450-489a-a456-3b580a5b9114] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1596.057125] env[62510]: DEBUG oslo_vmware.api [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Task: {'id': task-1768729, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.208456] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3b9a889c-1523-4f8a-954c-799bb67b2b97 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1596.413618] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63e17f54-4d82-44d5-a381-b4bff4151ebf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.423243] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2f8ecf9-f619-4818-8236-4ec92a0bfbef {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.463573] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ade0241-af60-4da2-bb5d-ec7d624223c9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.477208] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f736390-7607-4ce1-a2c2-04af0358f812 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.489967] env[62510]: DEBUG oslo_vmware.api [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': task-1768730, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.499703] env[62510]: DEBUG nova.compute.provider_tree [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1596.521853] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 0604d37b-38c5-4510-894e-b26fd44e17c5] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1596.543671] env[62510]: DEBUG nova.network.neutron [req-81129d09-1832-4cf6-8aba-17425df05f07 req-2b495635-db61-48f0-b34a-8c7c7d49cdb2 service nova] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Updated VIF entry in instance network info cache for port 9a53a8d4-8b7d-4167-b888-f20b2fce23c5. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1596.544610] env[62510]: DEBUG nova.network.neutron [req-81129d09-1832-4cf6-8aba-17425df05f07 req-2b495635-db61-48f0-b34a-8c7c7d49cdb2 service nova] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Updating instance_info_cache with network_info: [{"id": "9a53a8d4-8b7d-4167-b888-f20b2fce23c5", "address": "fa:16:3e:17:66:3e", "network": {"id": "00c0e43c-22e9-4b57-9337-f13ecd10f244", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1255004128-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98d6c12eccf74757b3cbc2c8acddeb19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27abaf31-0f39-428c-a8d3-cd7548de6818", "external-id": "nsx-vlan-transportzone-505", "segmentation_id": 505, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a53a8d4-8b", "ovs_interfaceid": "9a53a8d4-8b7d-4167-b888-f20b2fce23c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1596.557673] env[62510]: DEBUG oslo_vmware.api [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Task: {'id': task-1768729, 'name': ReconfigVM_Task, 'duration_secs': 0.62661} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1596.558795] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] Reconfigured VM instance instance-00000035 to attach disk [datastore1] fa43a538-1aae-4642-8370-70f2a49ca92c/fa43a538-1aae-4642-8370-70f2a49ca92c.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1596.560077] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aa7524f3-0809-4fbd-a8de-4d82d25ca0eb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.570399] env[62510]: DEBUG oslo_vmware.api [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Waiting for the task: (returnval){ [ 1596.570399] env[62510]: value = "task-1768731" [ 1596.570399] env[62510]: _type = "Task" [ 1596.570399] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1596.582765] env[62510]: DEBUG oslo_vmware.api [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Task: {'id': task-1768731, 'name': Rename_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.668797] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Acquiring lock "4d622ed5-5f6f-46ca-bc4a-efb32f452cb7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1596.668797] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Lock "4d622ed5-5f6f-46ca-bc4a-efb32f452cb7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1596.722566] env[62510]: DEBUG nova.network.neutron [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Successfully updated port: 30661ae8-22f8-4f9e-91d9-67d7a31e134c {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1596.985530] env[62510]: DEBUG oslo_vmware.api [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': task-1768730, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.652077} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1596.985848] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a/0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1596.986134] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1596.986424] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f7fcdff8-94a9-43af-b357-1775ed95b522 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.995244] env[62510]: DEBUG oslo_vmware.api [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Waiting for the task: (returnval){ [ 1596.995244] env[62510]: value = "task-1768732" [ 1596.995244] env[62510]: _type = "Task" [ 1596.995244] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1597.008083] env[62510]: DEBUG oslo_vmware.api [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': task-1768732, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.027275] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1597.034643] env[62510]: ERROR nova.scheduler.client.report [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] [req-e9b543de-fa9d-42e9-b565-99ff0aaf5bcf] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c3653102-341b-4ed1-8b1f-1abaf8aa3e56. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-e9b543de-fa9d-42e9-b565-99ff0aaf5bcf"}]} [ 1597.050983] env[62510]: DEBUG oslo_concurrency.lockutils [req-81129d09-1832-4cf6-8aba-17425df05f07 req-2b495635-db61-48f0-b34a-8c7c7d49cdb2 service nova] Releasing lock "refresh_cache-0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1597.054251] env[62510]: DEBUG nova.scheduler.client.report [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Refreshing inventories for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1597.075553] env[62510]: DEBUG nova.scheduler.client.report [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Updating ProviderTree inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1597.075797] env[62510]: DEBUG nova.compute.provider_tree [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1597.086447] env[62510]: DEBUG oslo_vmware.api [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Task: {'id': task-1768731, 'name': Rename_Task, 'duration_secs': 0.170431} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1597.086447] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1597.086785] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-15348871-1b65-41a0-bb4e-f7babdbb726a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.089742] env[62510]: DEBUG nova.scheduler.client.report [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Refreshing aggregate associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, aggregates: None {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1597.099102] env[62510]: DEBUG oslo_vmware.api [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Waiting for the task: (returnval){ [ 1597.099102] env[62510]: value = "task-1768733" [ 1597.099102] env[62510]: _type = "Task" [ 1597.099102] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1597.109194] env[62510]: DEBUG oslo_vmware.api [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Task: {'id': task-1768733, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.114145] env[62510]: DEBUG nova.scheduler.client.report [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Refreshing trait associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1597.171834] env[62510]: DEBUG nova.compute.manager [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1597.223558] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Acquiring lock "refresh_cache-13cdba63-5db4-419f-9e0b-244832d7866b" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1597.224485] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Acquired lock "refresh_cache-13cdba63-5db4-419f-9e0b-244832d7866b" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1597.224485] env[62510]: DEBUG nova.network.neutron [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1597.232195] env[62510]: DEBUG nova.network.neutron [-] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1597.366456] env[62510]: DEBUG nova.network.neutron [-] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1597.509589] env[62510]: DEBUG oslo_vmware.api [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': task-1768732, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07432} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1597.509889] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1597.510850] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7d1cd39-d98a-4eee-b1f0-106b88147729 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.542710] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Reconfiguring VM instance instance-00000034 to attach disk [datastore1] 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a/0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1597.546825] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-44bdfde7-3553-45f0-bb4a-b2871aeec801 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.571425] env[62510]: DEBUG oslo_vmware.api [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Waiting for the task: (returnval){ [ 1597.571425] env[62510]: value = "task-1768734" [ 1597.571425] env[62510]: _type = "Task" [ 1597.571425] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1597.582948] env[62510]: DEBUG oslo_vmware.api [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': task-1768734, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.619578] env[62510]: DEBUG oslo_vmware.api [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Task: {'id': task-1768733, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.694317] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1597.736121] env[62510]: INFO nova.compute.manager [-] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Took 2.56 seconds to deallocate network for instance. 
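The ERROR a few records up, "Got 409 ... placement.concurrent_update", is Placement's optimistic-concurrency check in action: every resource provider carries a generation, a PUT of inventories must quote the generation the writer last saw, and a stale generation is rejected, after which the report client re-reads the provider (the "Refreshing inventories ..." lines) and tries again (the later "generation from 77 to 78" record). A minimal sketch of that read-modify-retry loop against the Placement REST API follows; the endpoint, token and microversion header are placeholders, and only the provider UUID is taken from the log.

    import requests

    PLACEMENT = "http://placement.example/placement"   # assumed endpoint
    HEADERS = {
        "X-Auth-Token": "<token>",                      # assumed auth token
        "OpenStack-API-Version": "placement 1.26",
    }
    RP = "c3653102-341b-4ed1-8b1f-1abaf8aa3e56"         # provider UUID from the log

    def put_inventories(inventories, attempts=3):
        url = f"{PLACEMENT}/resource_providers/{RP}/inventories"
        for _ in range(attempts):
            # Read the current inventories to learn the provider generation.
            current = requests.get(url, headers=HEADERS).json()
            body = {
                "resource_provider_generation": current["resource_provider_generation"],
                "inventories": inventories,
            }
            resp = requests.put(url, headers=HEADERS, json=body)
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()
            # 409 placement.concurrent_update: another writer bumped the
            # generation in the meantime; loop, re-read and retry.
        raise RuntimeError("inventory update kept hitting generation conflicts")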
[ 1597.777631] env[62510]: DEBUG nova.network.neutron [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1597.819083] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c139bab-0d41-492a-943d-b0b829df2f67 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.828681] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-223c06b9-bd4b-49dc-86eb-60cea10d4392 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.836729] env[62510]: DEBUG nova.compute.manager [req-bca0c341-4374-4d6b-8a00-5f11fea811e4 req-54d742f5-d433-4539-aa76-9323ea8e175a service nova] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Received event network-vif-plugged-30661ae8-22f8-4f9e-91d9-67d7a31e134c {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1597.836994] env[62510]: DEBUG oslo_concurrency.lockutils [req-bca0c341-4374-4d6b-8a00-5f11fea811e4 req-54d742f5-d433-4539-aa76-9323ea8e175a service nova] Acquiring lock "13cdba63-5db4-419f-9e0b-244832d7866b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1597.837219] env[62510]: DEBUG oslo_concurrency.lockutils [req-bca0c341-4374-4d6b-8a00-5f11fea811e4 req-54d742f5-d433-4539-aa76-9323ea8e175a service nova] Lock "13cdba63-5db4-419f-9e0b-244832d7866b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1597.837381] env[62510]: DEBUG oslo_concurrency.lockutils [req-bca0c341-4374-4d6b-8a00-5f11fea811e4 req-54d742f5-d433-4539-aa76-9323ea8e175a service nova] Lock "13cdba63-5db4-419f-9e0b-244832d7866b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1597.837547] env[62510]: DEBUG nova.compute.manager [req-bca0c341-4374-4d6b-8a00-5f11fea811e4 req-54d742f5-d433-4539-aa76-9323ea8e175a service nova] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] No waiting events found dispatching network-vif-plugged-30661ae8-22f8-4f9e-91d9-67d7a31e134c {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1597.837703] env[62510]: WARNING nova.compute.manager [req-bca0c341-4374-4d6b-8a00-5f11fea811e4 req-54d742f5-d433-4539-aa76-9323ea8e175a service nova] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Received unexpected event network-vif-plugged-30661ae8-22f8-4f9e-91d9-67d7a31e134c for instance with vm_state building and task_state spawning. [ 1597.872023] env[62510]: INFO nova.compute.manager [-] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Took 2.01 seconds to deallocate network for instance. 
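The "Took 2.01 seconds to deallocate network for instance" record closes out the loopingcall started earlier ("Waiting for function ... _deallocate_network_with_retries to return"): network teardown is run on a timer until it reports success. Below is a rough, self-contained illustration of that oslo.service pattern, with a counter standing in for the real Neutron teardown call; it is not Nova's exact helper.

    from oslo_service import loopingcall

    attempts = {"count": 0}

    def _deallocate_with_retries():
        attempts["count"] += 1
        if attempts["count"] < 3:
            return                               # pretend teardown failed; run again next tick
        raise loopingcall.LoopingCallDone()      # success: stop the timer

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    timer.start(interval=2, initial_delay=0).wait()   # blocks, like the "Waiting for function ..." line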
[ 1597.873994] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf22a9a5-e319-46a6-93a5-6733e7cfc64a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.886856] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f72977f-aa3f-4576-b02b-179008a8172a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.902687] env[62510]: DEBUG nova.compute.provider_tree [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1598.082440] env[62510]: DEBUG oslo_vmware.api [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': task-1768734, 'name': ReconfigVM_Task, 'duration_secs': 0.390694} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.082882] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Reconfigured VM instance instance-00000034 to attach disk [datastore1] 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a/0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1598.083239] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=62510) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 1598.084635] env[62510]: DEBUG nova.network.neutron [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Updating instance_info_cache with network_info: [{"id": "30661ae8-22f8-4f9e-91d9-67d7a31e134c", "address": "fa:16:3e:56:11:04", "network": {"id": "d642e7b5-7af1-4b88-a23f-8cc8ee29428b", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1437968534-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cd2a30849f1f4574a890619b3fff7010", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30661ae8-22", "ovs_interfaceid": "30661ae8-22f8-4f9e-91d9-67d7a31e134c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1598.085884] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-cc636d65-14dc-40c1-a335-ad349562c81b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.096088] env[62510]: DEBUG oslo_vmware.api [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Waiting for the task: (returnval){ [ 1598.096088] env[62510]: value = "task-1768735" [ 1598.096088] env[62510]: _type = "Task" [ 1598.096088] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.107445] env[62510]: DEBUG oslo_vmware.api [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': task-1768735, 'name': CreateVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.110629] env[62510]: DEBUG oslo_vmware.api [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Task: {'id': task-1768733, 'name': PowerOnVM_Task, 'duration_secs': 0.522942} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.110890] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1598.111107] env[62510]: INFO nova.compute.manager [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] Took 5.43 seconds to spawn the instance on the hypervisor. 
[ 1598.111298] env[62510]: DEBUG nova.compute.manager [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1598.112392] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccb721ee-2725-4cd5-9f40-96dd14bb209a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.142157] env[62510]: DEBUG nova.compute.manager [req-4177a962-e81e-48c4-8a06-3ad5e1e0f60d req-a0c58920-23cf-40d2-965d-4e857625bd5f service nova] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Received event network-changed-b93d3484-b909-4060-aef6-1f45f91f2325 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1598.142370] env[62510]: DEBUG nova.compute.manager [req-4177a962-e81e-48c4-8a06-3ad5e1e0f60d req-a0c58920-23cf-40d2-965d-4e857625bd5f service nova] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Refreshing instance network info cache due to event network-changed-b93d3484-b909-4060-aef6-1f45f91f2325. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1598.142828] env[62510]: DEBUG oslo_concurrency.lockutils [req-4177a962-e81e-48c4-8a06-3ad5e1e0f60d req-a0c58920-23cf-40d2-965d-4e857625bd5f service nova] Acquiring lock "refresh_cache-b004fba7-13e0-40f0-827d-8d09b7717176" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1598.142987] env[62510]: DEBUG oslo_concurrency.lockutils [req-4177a962-e81e-48c4-8a06-3ad5e1e0f60d req-a0c58920-23cf-40d2-965d-4e857625bd5f service nova] Acquired lock "refresh_cache-b004fba7-13e0-40f0-827d-8d09b7717176" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1598.143173] env[62510]: DEBUG nova.network.neutron [req-4177a962-e81e-48c4-8a06-3ad5e1e0f60d req-a0c58920-23cf-40d2-965d-4e857625bd5f service nova] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Refreshing network info cache for port b93d3484-b909-4060-aef6-1f45f91f2325 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1598.247230] env[62510]: DEBUG oslo_concurrency.lockutils [None req-daaf4048-a186-46d5-8731-1089c883ed22 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1598.382927] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0da0e7d6-938e-48c2-98c4-43f87173c67c tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1598.436685] env[62510]: DEBUG nova.scheduler.client.report [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Updated inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with generation 77 in Placement from set_inventory_for_provider using data: {'VCPU': 
{'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1598.437043] env[62510]: DEBUG nova.compute.provider_tree [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Updating resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 generation from 77 to 78 during operation: update_inventory {{(pid=62510) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1598.437276] env[62510]: DEBUG nova.compute.provider_tree [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1598.590261] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Releasing lock "refresh_cache-13cdba63-5db4-419f-9e0b-244832d7866b" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1598.590669] env[62510]: DEBUG nova.compute.manager [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Instance network_info: |[{"id": "30661ae8-22f8-4f9e-91d9-67d7a31e134c", "address": "fa:16:3e:56:11:04", "network": {"id": "d642e7b5-7af1-4b88-a23f-8cc8ee29428b", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1437968534-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cd2a30849f1f4574a890619b3fff7010", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30661ae8-22", "ovs_interfaceid": "30661ae8-22f8-4f9e-91d9-67d7a31e134c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1598.591125] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:56:11:04', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7a44713-0af1-486e-bc0d-00e03a769fa4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '30661ae8-22f8-4f9e-91d9-67d7a31e134c', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1598.602487] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Creating folder: Project (cd2a30849f1f4574a890619b3fff7010). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1598.603528] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-527ea959-64e4-41ca-919b-5e507abd955f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.618379] env[62510]: DEBUG oslo_vmware.api [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': task-1768735, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.080024} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.618681] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=62510) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 1598.619631] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab728de1-2a40-4740-ad9e-187f82d75450 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.628754] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Created folder: Project (cd2a30849f1f4574a890619b3fff7010) in parent group-v367197. [ 1598.628950] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Creating folder: Instances. Parent ref: group-v367339. 
{{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1598.632270] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-87706f62-85e0-4f55-9137-af89bc54c9c1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.634483] env[62510]: INFO nova.compute.manager [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] Took 43.72 seconds to build instance. [ 1598.662990] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Reconfiguring VM instance instance-00000034 to attach disk [datastore1] 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a/ephemeral_0.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1598.666259] env[62510]: DEBUG oslo_concurrency.lockutils [None req-77747de3-3b44-4863-8338-8eb43ea8eea9 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Lock "fa43a538-1aae-4642-8370-70f2a49ca92c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.947s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1598.666542] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b6f5445a-8ad8-409a-92a6-4947e22ab374 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.681375] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Created folder: Instances in parent group-v367339. [ 1598.681651] env[62510]: DEBUG oslo.service.loopingcall [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1598.682312] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1598.682863] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-70de438d-9c48-46f0-adc3-b475790a8ca1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.700947] env[62510]: DEBUG oslo_vmware.api [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Waiting for the task: (returnval){ [ 1598.700947] env[62510]: value = "task-1768738" [ 1598.700947] env[62510]: _type = "Task" [ 1598.700947] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.709163] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1598.709163] env[62510]: value = "task-1768739" [ 1598.709163] env[62510]: _type = "Task" [ 1598.709163] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.713542] env[62510]: DEBUG oslo_vmware.api [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': task-1768738, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.723185] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768739, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.947845] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 5.647s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1598.948263] env[62510]: DEBUG nova.compute.manager [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1598.953218] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4cfc52b1-d6b8-48ce-98b9-090206da0c3f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.931s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1598.954029] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4cfc52b1-d6b8-48ce-98b9-090206da0c3f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1598.956314] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bc0748ed-c5bc-4d47-9137-6d3481e34650 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.856s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1598.956696] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bc0748ed-c5bc-4d47-9137-6d3481e34650 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1598.958959] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a3d77851-01c5-4940-827e-f1f4ec1edba2 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.129s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1598.959372] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a3d77851-01c5-4940-827e-f1f4ec1edba2 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1598.962182] env[62510]: DEBUG oslo_concurrency.lockutils [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.102s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1598.963520] env[62510]: INFO nova.compute.claims [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1598.968907] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6e48f05f-7737-429b-9819-dbbacc307385 tempest-FloatingIPsAssociationTestJSON-1796888155 
tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Acquiring lock "bd21dd81-c0d9-4ff1-9183-0b4622dc5afb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1599.004148] env[62510]: INFO nova.scheduler.client.report [None req-bc0748ed-c5bc-4d47-9137-6d3481e34650 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Deleted allocations for instance 87d1d75e-41c4-42e6-bf58-deabb71400e1 [ 1599.010452] env[62510]: INFO nova.scheduler.client.report [None req-a3d77851-01c5-4940-827e-f1f4ec1edba2 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Deleted allocations for instance 34a464e2-d38e-4c24-a487-c62a4f484667 [ 1599.025013] env[62510]: INFO nova.scheduler.client.report [None req-4cfc52b1-d6b8-48ce-98b9-090206da0c3f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Deleted allocations for instance 58e71d67-aed2-4329-ab60-4dfacff1d0a2 [ 1599.065497] env[62510]: DEBUG nova.network.neutron [req-4177a962-e81e-48c4-8a06-3ad5e1e0f60d req-a0c58920-23cf-40d2-965d-4e857625bd5f service nova] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Updated VIF entry in instance network info cache for port b93d3484-b909-4060-aef6-1f45f91f2325. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1599.065896] env[62510]: DEBUG nova.network.neutron [req-4177a962-e81e-48c4-8a06-3ad5e1e0f60d req-a0c58920-23cf-40d2-965d-4e857625bd5f service nova] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Updating instance_info_cache with network_info: [{"id": "b93d3484-b909-4060-aef6-1f45f91f2325", "address": "fa:16:3e:db:60:53", "network": {"id": "457a7f06-5f1c-485e-8589-43d0e40d3fc5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2119303260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c995c2427bd4f7da644d0a8df7d69da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2153f70-3d14-42ab-8bb3-be78296dd3b8", "external-id": "nsx-vlan-transportzone-532", "segmentation_id": 532, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb93d3484-b9", "ovs_interfaceid": "b93d3484-b909-4060-aef6-1f45f91f2325", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1599.215809] env[62510]: DEBUG oslo_vmware.api [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': task-1768738, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.228095] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768739, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.471785] env[62510]: DEBUG nova.compute.utils [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1599.476970] env[62510]: DEBUG nova.compute.manager [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1599.476970] env[62510]: DEBUG nova.network.neutron [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] [instance: 641628d1-bb6d-4207-89b9-98014328e028] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1599.516713] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bc0748ed-c5bc-4d47-9137-6d3481e34650 tempest-ServersTestMultiNic-1170217472 tempest-ServersTestMultiNic-1170217472-project-member] Lock "87d1d75e-41c4-42e6-bf58-deabb71400e1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.015s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1599.520333] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a3d77851-01c5-4940-827e-f1f4ec1edba2 tempest-ServersAdminNegativeTestJSON-353363092 tempest-ServersAdminNegativeTestJSON-353363092-project-member] Lock "34a464e2-d38e-4c24-a487-c62a4f484667" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.200s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1599.535543] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4cfc52b1-d6b8-48ce-98b9-090206da0c3f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "58e71d67-aed2-4329-ab60-4dfacff1d0a2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.627s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1599.538482] env[62510]: DEBUG nova.policy [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8529085eb049423fa9a44257e9e05ff9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd3400f6fc16e4c3f9f26232efec47435', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1599.568422] env[62510]: DEBUG oslo_concurrency.lockutils [req-4177a962-e81e-48c4-8a06-3ad5e1e0f60d req-a0c58920-23cf-40d2-965d-4e857625bd5f service nova] Releasing lock "refresh_cache-b004fba7-13e0-40f0-827d-8d09b7717176" 
{{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1599.568692] env[62510]: DEBUG nova.compute.manager [req-4177a962-e81e-48c4-8a06-3ad5e1e0f60d req-a0c58920-23cf-40d2-965d-4e857625bd5f service nova] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Received event network-vif-deleted-766401c7-3f55-48f7-a695-d2db7a829ade {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1599.568886] env[62510]: DEBUG nova.compute.manager [req-4177a962-e81e-48c4-8a06-3ad5e1e0f60d req-a0c58920-23cf-40d2-965d-4e857625bd5f service nova] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Received event network-vif-deleted-71218680-7c53-442a-ab27-cfa4db01f20c {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1599.569071] env[62510]: DEBUG nova.compute.manager [req-4177a962-e81e-48c4-8a06-3ad5e1e0f60d req-a0c58920-23cf-40d2-965d-4e857625bd5f service nova] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Received event network-vif-deleted-f3011c4d-9d43-4939-9157-df0532a51861 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1599.714979] env[62510]: DEBUG oslo_vmware.api [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': task-1768738, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.732897] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768739, 'name': CreateVM_Task, 'duration_secs': 0.529325} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1599.733144] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1599.733936] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1599.734117] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1599.734445] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1599.734745] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1def653-f26c-4a0b-862e-b78c59735c7b {{(pid=62510) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.743017] env[62510]: DEBUG oslo_vmware.api [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Waiting for the task: (returnval){ [ 1599.743017] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]528d87fa-0cc3-3b6e-104b-ae3132ade797" [ 1599.743017] env[62510]: _type = "Task" [ 1599.743017] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1599.754528] env[62510]: DEBUG oslo_vmware.api [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]528d87fa-0cc3-3b6e-104b-ae3132ade797, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.815284] env[62510]: DEBUG nova.compute.manager [None req-5df00c08-4467-46e8-a9d4-6a35fa350249 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1599.816259] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7080185a-5163-48d3-909f-0f482c45a489 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.858579] env[62510]: DEBUG nova.network.neutron [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Successfully created port: 246fe90c-d755-46fd-a256-e1f26ac76e09 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1599.976652] env[62510]: DEBUG nova.compute.manager [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1600.218223] env[62510]: DEBUG oslo_vmware.api [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': task-1768738, 'name': ReconfigVM_Task, 'duration_secs': 1.347023} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1600.218542] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Reconfigured VM instance instance-00000034 to attach disk [datastore1] 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a/ephemeral_0.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1600.219223] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ef9bdba1-716f-4ba4-99a3-04b08153cfa5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.229824] env[62510]: DEBUG oslo_vmware.api [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Waiting for the task: (returnval){ [ 1600.229824] env[62510]: value = "task-1768740" [ 1600.229824] env[62510]: _type = "Task" [ 1600.229824] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1600.240471] env[62510]: DEBUG oslo_vmware.api [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': task-1768740, 'name': Rename_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.256799] env[62510]: DEBUG oslo_vmware.api [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]528d87fa-0cc3-3b6e-104b-ae3132ade797, 'name': SearchDatastore_Task, 'duration_secs': 0.018734} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1600.261197] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1600.261197] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1600.261197] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1600.261197] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1600.261400] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1600.261875] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c82a093e-af92-48de-9561-66cfcfbeada0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.273776] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1600.273776] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1600.274969] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf151399-fbb5-4868-8737-bd8ca8936139 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.284935] env[62510]: DEBUG oslo_vmware.api [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Waiting for the task: (returnval){ [ 1600.284935] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]529c438a-660f-408b-756a-9506a6df0cbe" [ 1600.284935] env[62510]: _type = "Task" [ 1600.284935] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1600.295275] env[62510]: DEBUG oslo_vmware.api [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]529c438a-660f-408b-756a-9506a6df0cbe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.329081] env[62510]: INFO nova.compute.manager [None req-5df00c08-4467-46e8-a9d4-6a35fa350249 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] instance snapshotting [ 1600.329769] env[62510]: DEBUG nova.objects.instance [None req-5df00c08-4467-46e8-a9d4-6a35fa350249 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Lazy-loading 'flavor' on Instance uuid fa43a538-1aae-4642-8370-70f2a49ca92c {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1600.441028] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9e2e639c-d178-4ed9-bfc0-9d2f6bc3fb79 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Acquiring lock "fa43a538-1aae-4642-8370-70f2a49ca92c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1600.441349] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9e2e639c-d178-4ed9-bfc0-9d2f6bc3fb79 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Lock "fa43a538-1aae-4642-8370-70f2a49ca92c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1600.441573] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9e2e639c-d178-4ed9-bfc0-9d2f6bc3fb79 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Acquiring lock "fa43a538-1aae-4642-8370-70f2a49ca92c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1600.441777] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9e2e639c-d178-4ed9-bfc0-9d2f6bc3fb79 tempest-ServersAaction247Test-1426561660 
tempest-ServersAaction247Test-1426561660-project-member] Lock "fa43a538-1aae-4642-8370-70f2a49ca92c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1600.441972] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9e2e639c-d178-4ed9-bfc0-9d2f6bc3fb79 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Lock "fa43a538-1aae-4642-8370-70f2a49ca92c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1600.444529] env[62510]: INFO nova.compute.manager [None req-9e2e639c-d178-4ed9-bfc0-9d2f6bc3fb79 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] Terminating instance [ 1600.574617] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0126115c-8a4c-4a88-858c-f2ea2902ed5e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.585585] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a96fd400-8d26-4d34-a9dc-faf77a656402 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.618043] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54774ba4-4984-498f-b2ac-bf076c8b53d2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.626924] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a1ca839-7ff5-4eb4-9acb-efefce183f11 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.643597] env[62510]: DEBUG nova.compute.provider_tree [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1600.741818] env[62510]: DEBUG oslo_vmware.api [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': task-1768740, 'name': Rename_Task, 'duration_secs': 0.205481} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1600.742188] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1600.742504] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4ff77f35-51b6-446f-9434-3b38f2bad200 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.751511] env[62510]: DEBUG oslo_vmware.api [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Waiting for the task: (returnval){ [ 1600.751511] env[62510]: value = "task-1768741" [ 1600.751511] env[62510]: _type = "Task" [ 1600.751511] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1600.761377] env[62510]: DEBUG oslo_vmware.api [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': task-1768741, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.797542] env[62510]: DEBUG oslo_vmware.api [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]529c438a-660f-408b-756a-9506a6df0cbe, 'name': SearchDatastore_Task, 'duration_secs': 0.012751} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1600.798552] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e0bcb3a-2b66-4d47-b9d2-4177f52546ec {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.805728] env[62510]: DEBUG oslo_vmware.api [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Waiting for the task: (returnval){ [ 1600.805728] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5211c665-13fb-773a-9b50-d01d33e14bc8" [ 1600.805728] env[62510]: _type = "Task" [ 1600.805728] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1600.814537] env[62510]: DEBUG oslo_vmware.api [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5211c665-13fb-773a-9b50-d01d33e14bc8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.835433] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85e621e8-6580-4d84-b533-72ea90d0038b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.854425] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed63e396-41cf-42e1-ad98-e2d5603d4fc1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.905956] env[62510]: DEBUG nova.compute.manager [req-bb03ad82-d842-49e5-8608-15b90d7393e8 req-fd7958d1-a174-4d41-b3b9-1b6448f6ab91 service nova] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Received event network-changed-30661ae8-22f8-4f9e-91d9-67d7a31e134c {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1600.906518] env[62510]: DEBUG nova.compute.manager [req-bb03ad82-d842-49e5-8608-15b90d7393e8 req-fd7958d1-a174-4d41-b3b9-1b6448f6ab91 service nova] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Refreshing instance network info cache due to event network-changed-30661ae8-22f8-4f9e-91d9-67d7a31e134c. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1600.906616] env[62510]: DEBUG oslo_concurrency.lockutils [req-bb03ad82-d842-49e5-8608-15b90d7393e8 req-fd7958d1-a174-4d41-b3b9-1b6448f6ab91 service nova] Acquiring lock "refresh_cache-13cdba63-5db4-419f-9e0b-244832d7866b" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1600.906873] env[62510]: DEBUG oslo_concurrency.lockutils [req-bb03ad82-d842-49e5-8608-15b90d7393e8 req-fd7958d1-a174-4d41-b3b9-1b6448f6ab91 service nova] Acquired lock "refresh_cache-13cdba63-5db4-419f-9e0b-244832d7866b" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1600.906947] env[62510]: DEBUG nova.network.neutron [req-bb03ad82-d842-49e5-8608-15b90d7393e8 req-fd7958d1-a174-4d41-b3b9-1b6448f6ab91 service nova] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Refreshing network info cache for port 30661ae8-22f8-4f9e-91d9-67d7a31e134c {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1600.955847] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9e2e639c-d178-4ed9-bfc0-9d2f6bc3fb79 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Acquiring lock "refresh_cache-fa43a538-1aae-4642-8370-70f2a49ca92c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1600.956314] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9e2e639c-d178-4ed9-bfc0-9d2f6bc3fb79 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Acquired lock "refresh_cache-fa43a538-1aae-4642-8370-70f2a49ca92c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1600.956736] env[62510]: DEBUG nova.network.neutron [None req-9e2e639c-d178-4ed9-bfc0-9d2f6bc3fb79 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1600.991405] 
env[62510]: DEBUG nova.compute.manager [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1601.031421] env[62510]: DEBUG nova.virt.hardware [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1601.034000] env[62510]: DEBUG nova.virt.hardware [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1601.034000] env[62510]: DEBUG nova.virt.hardware [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1601.034000] env[62510]: DEBUG nova.virt.hardware [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1601.034000] env[62510]: DEBUG nova.virt.hardware [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1601.034000] env[62510]: DEBUG nova.virt.hardware [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1601.034000] env[62510]: DEBUG nova.virt.hardware [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 
1601.034000] env[62510]: DEBUG nova.virt.hardware [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1601.034000] env[62510]: DEBUG nova.virt.hardware [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1601.034000] env[62510]: DEBUG nova.virt.hardware [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1601.034000] env[62510]: DEBUG nova.virt.hardware [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1601.035800] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7682ccba-ba2a-4f8c-8d4d-a2673f1e5e5e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.044642] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "90869287-22bd-438c-8684-56f5d43e3ca8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1601.044951] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "90869287-22bd-438c-8684-56f5d43e3ca8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1601.052418] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1696c66b-7f44-477b-bc2a-85ea00676856 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.147098] env[62510]: DEBUG nova.scheduler.client.report [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1601.196554] env[62510]: DEBUG nova.compute.manager [req-d9cb0c48-e5da-4681-a94e-f5e758cd06ce req-46a1873c-fc0b-41a6-a7eb-79f3a7810835 service nova] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Received event network-changed-b93d3484-b909-4060-aef6-1f45f91f2325 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1601.196771] env[62510]: DEBUG nova.compute.manager [req-d9cb0c48-e5da-4681-a94e-f5e758cd06ce req-46a1873c-fc0b-41a6-a7eb-79f3a7810835 service nova] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Refreshing instance network info cache due to event network-changed-b93d3484-b909-4060-aef6-1f45f91f2325. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1601.196996] env[62510]: DEBUG oslo_concurrency.lockutils [req-d9cb0c48-e5da-4681-a94e-f5e758cd06ce req-46a1873c-fc0b-41a6-a7eb-79f3a7810835 service nova] Acquiring lock "refresh_cache-b004fba7-13e0-40f0-827d-8d09b7717176" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1601.198300] env[62510]: DEBUG oslo_concurrency.lockutils [req-d9cb0c48-e5da-4681-a94e-f5e758cd06ce req-46a1873c-fc0b-41a6-a7eb-79f3a7810835 service nova] Acquired lock "refresh_cache-b004fba7-13e0-40f0-827d-8d09b7717176" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1601.198405] env[62510]: DEBUG nova.network.neutron [req-d9cb0c48-e5da-4681-a94e-f5e758cd06ce req-46a1873c-fc0b-41a6-a7eb-79f3a7810835 service nova] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Refreshing network info cache for port b93d3484-b909-4060-aef6-1f45f91f2325 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1601.269199] env[62510]: DEBUG oslo_vmware.api [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': task-1768741, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.317956] env[62510]: DEBUG oslo_vmware.api [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5211c665-13fb-773a-9b50-d01d33e14bc8, 'name': SearchDatastore_Task, 'duration_secs': 0.018835} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1601.318273] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1601.318612] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 13cdba63-5db4-419f-9e0b-244832d7866b/13cdba63-5db4-419f-9e0b-244832d7866b.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1601.318833] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-07e705d7-820f-4f1a-a263-2079f16165d9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.327937] env[62510]: DEBUG oslo_vmware.api [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Waiting for the task: (returnval){ [ 1601.327937] env[62510]: value = "task-1768742" [ 1601.327937] env[62510]: _type = "Task" [ 1601.327937] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1601.338772] env[62510]: DEBUG oslo_vmware.api [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Task: {'id': task-1768742, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.367941] env[62510]: DEBUG nova.compute.manager [None req-5df00c08-4467-46e8-a9d4-6a35fa350249 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] Instance disappeared during snapshot {{(pid=62510) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 1601.484481] env[62510]: DEBUG nova.network.neutron [None req-9e2e639c-d178-4ed9-bfc0-9d2f6bc3fb79 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1601.552361] env[62510]: DEBUG nova.compute.manager [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1601.564267] env[62510]: DEBUG nova.compute.manager [None req-5df00c08-4467-46e8-a9d4-6a35fa350249 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] Found 0 images (rotation: 2) {{(pid=62510) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4924}} [ 1601.582860] env[62510]: DEBUG nova.network.neutron [None req-9e2e639c-d178-4ed9-bfc0-9d2f6bc3fb79 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1601.591090] env[62510]: DEBUG nova.network.neutron [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Successfully updated port: 246fe90c-d755-46fd-a256-e1f26ac76e09 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1601.651902] env[62510]: DEBUG oslo_concurrency.lockutils [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.690s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1601.652455] env[62510]: DEBUG nova.compute.manager [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1601.658286] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 31.274s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1601.761513] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "0082eb97-26e9-4196-b8e3-63460d32dd19" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1601.763122] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "0082eb97-26e9-4196-b8e3-63460d32dd19" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1601.778248] env[62510]: DEBUG oslo_vmware.api [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': task-1768741, 'name': PowerOnVM_Task, 'duration_secs': 0.629733} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1601.778671] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1601.779241] env[62510]: INFO nova.compute.manager [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Took 11.28 seconds to spawn the instance on the hypervisor. [ 1601.779892] env[62510]: DEBUG nova.compute.manager [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1601.781127] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f20888e0-4650-4b91-81a4-b3bb378b7a5d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.848806] env[62510]: DEBUG oslo_vmware.api [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Task: {'id': task-1768742, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.916415] env[62510]: DEBUG oslo_vmware.rw_handles [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c3c82f-e51f-d35c-ffae-9074e967168c/disk-0.vmdk. {{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1601.917381] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-753af01a-a2dc-44c7-b0a2-b63916f93a83 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.924840] env[62510]: DEBUG oslo_vmware.rw_handles [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c3c82f-e51f-d35c-ffae-9074e967168c/disk-0.vmdk is in state: ready. {{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1601.925033] env[62510]: ERROR oslo_vmware.rw_handles [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c3c82f-e51f-d35c-ffae-9074e967168c/disk-0.vmdk due to incomplete transfer. [ 1601.925193] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-c96887a6-8bf8-47de-8bfd-83aac994e71c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.935736] env[62510]: DEBUG oslo_vmware.rw_handles [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c3c82f-e51f-d35c-ffae-9074e967168c/disk-0.vmdk. 
{{(pid=62510) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1601.936077] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Uploaded image b54fc7f8-0408-41bd-abcd-6c673fa40237 to the Glance image server {{(pid=62510) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1601.938331] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Destroying the VM {{(pid=62510) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1601.938639] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-9cdfc416-baac-48b7-b402-0d2f098bee1f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.948847] env[62510]: DEBUG oslo_vmware.api [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1601.948847] env[62510]: value = "task-1768743" [ 1601.948847] env[62510]: _type = "Task" [ 1601.948847] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1601.961498] env[62510]: DEBUG oslo_vmware.api [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1768743, 'name': Destroy_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.987075] env[62510]: DEBUG nova.network.neutron [req-bb03ad82-d842-49e5-8608-15b90d7393e8 req-fd7958d1-a174-4d41-b3b9-1b6448f6ab91 service nova] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Updated VIF entry in instance network info cache for port 30661ae8-22f8-4f9e-91d9-67d7a31e134c. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1601.987075] env[62510]: DEBUG nova.network.neutron [req-bb03ad82-d842-49e5-8608-15b90d7393e8 req-fd7958d1-a174-4d41-b3b9-1b6448f6ab91 service nova] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Updating instance_info_cache with network_info: [{"id": "30661ae8-22f8-4f9e-91d9-67d7a31e134c", "address": "fa:16:3e:56:11:04", "network": {"id": "d642e7b5-7af1-4b88-a23f-8cc8ee29428b", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1437968534-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cd2a30849f1f4574a890619b3fff7010", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30661ae8-22", "ovs_interfaceid": "30661ae8-22f8-4f9e-91d9-67d7a31e134c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1602.082095] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1602.086810] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9e2e639c-d178-4ed9-bfc0-9d2f6bc3fb79 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Releasing lock "refresh_cache-fa43a538-1aae-4642-8370-70f2a49ca92c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1602.087231] env[62510]: DEBUG nova.compute.manager [None req-9e2e639c-d178-4ed9-bfc0-9d2f6bc3fb79 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1602.087424] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9e2e639c-d178-4ed9-bfc0-9d2f6bc3fb79 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1602.088413] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4807125d-b0e5-479c-8f4d-5b904e4639d1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.092221] env[62510]: DEBUG nova.network.neutron [req-d9cb0c48-e5da-4681-a94e-f5e758cd06ce req-46a1873c-fc0b-41a6-a7eb-79f3a7810835 service nova] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Updated VIF entry in instance network info cache for port b93d3484-b909-4060-aef6-1f45f91f2325. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1602.092633] env[62510]: DEBUG nova.network.neutron [req-d9cb0c48-e5da-4681-a94e-f5e758cd06ce req-46a1873c-fc0b-41a6-a7eb-79f3a7810835 service nova] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Updating instance_info_cache with network_info: [{"id": "b93d3484-b909-4060-aef6-1f45f91f2325", "address": "fa:16:3e:db:60:53", "network": {"id": "457a7f06-5f1c-485e-8589-43d0e40d3fc5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-2119303260-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c995c2427bd4f7da644d0a8df7d69da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e2153f70-3d14-42ab-8bb3-be78296dd3b8", "external-id": "nsx-vlan-transportzone-532", "segmentation_id": 532, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb93d3484-b9", "ovs_interfaceid": "b93d3484-b909-4060-aef6-1f45f91f2325", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1602.096863] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Acquiring lock "refresh_cache-641628d1-bb6d-4207-89b9-98014328e028" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1602.097032] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Acquired lock "refresh_cache-641628d1-bb6d-4207-89b9-98014328e028" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1602.097196] env[62510]: DEBUG nova.network.neutron [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 
tempest-InstanceActionsV221TestJSON-2064152346-project-member] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1602.102865] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e2e639c-d178-4ed9-bfc0-9d2f6bc3fb79 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1602.102865] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-33453562-be63-4925-9b80-7c4f28f3db79 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.109670] env[62510]: DEBUG oslo_vmware.api [None req-9e2e639c-d178-4ed9-bfc0-9d2f6bc3fb79 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Waiting for the task: (returnval){ [ 1602.109670] env[62510]: value = "task-1768744" [ 1602.109670] env[62510]: _type = "Task" [ 1602.109670] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.119841] env[62510]: DEBUG oslo_vmware.api [None req-9e2e639c-d178-4ed9-bfc0-9d2f6bc3fb79 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Task: {'id': task-1768744, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.159955] env[62510]: DEBUG nova.compute.utils [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1602.161458] env[62510]: DEBUG nova.compute.manager [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1602.161600] env[62510]: DEBUG nova.network.neutron [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1602.166067] env[62510]: INFO nova.compute.claims [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1602.212277] env[62510]: DEBUG nova.policy [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '97a7f1ca55d549a3985e95b6bbc665f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '94a46473611d4b22be7c66c909d1b348', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1602.302233] env[62510]: INFO nova.compute.manager [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Took 49.51 seconds to build instance. [ 1602.342646] env[62510]: DEBUG oslo_vmware.api [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Task: {'id': task-1768742, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.458671] env[62510]: DEBUG oslo_vmware.api [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1768743, 'name': Destroy_Task, 'duration_secs': 0.494068} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.458954] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Destroyed the VM [ 1602.459227] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Deleting Snapshot of the VM instance {{(pid=62510) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1602.459490] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-839ad97d-fa31-455e-b5f3-6bb5b3ef2387 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.466620] env[62510]: DEBUG oslo_vmware.api [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1602.466620] env[62510]: value = "task-1768745" [ 1602.466620] env[62510]: _type = "Task" [ 1602.466620] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.475593] env[62510]: DEBUG oslo_vmware.api [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1768745, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.489785] env[62510]: DEBUG oslo_concurrency.lockutils [req-bb03ad82-d842-49e5-8608-15b90d7393e8 req-fd7958d1-a174-4d41-b3b9-1b6448f6ab91 service nova] Releasing lock "refresh_cache-13cdba63-5db4-419f-9e0b-244832d7866b" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1602.489785] env[62510]: DEBUG nova.compute.manager [req-bb03ad82-d842-49e5-8608-15b90d7393e8 req-fd7958d1-a174-4d41-b3b9-1b6448f6ab91 service nova] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Received event network-vif-deleted-3ed074cb-cedf-490b-b36a-d695cbf28633 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1602.502943] env[62510]: DEBUG nova.network.neutron [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Successfully created port: 6eb8d1eb-fc0d-41fd-a107-12a791bcd483 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1602.597203] env[62510]: DEBUG oslo_concurrency.lockutils [req-d9cb0c48-e5da-4681-a94e-f5e758cd06ce req-46a1873c-fc0b-41a6-a7eb-79f3a7810835 service nova] Releasing lock "refresh_cache-b004fba7-13e0-40f0-827d-8d09b7717176" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1602.619939] env[62510]: DEBUG oslo_vmware.api [None req-9e2e639c-d178-4ed9-bfc0-9d2f6bc3fb79 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Task: {'id': task-1768744, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.653135] env[62510]: DEBUG nova.network.neutron [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1602.671650] env[62510]: DEBUG nova.compute.manager [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1602.677569] env[62510]: INFO nova.compute.resource_tracker [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Updating resource usage from migration 8d1549df-6eeb-4b96-9648-01fd9071d48d [ 1602.804436] env[62510]: DEBUG oslo_concurrency.lockutils [None req-10c23fda-815a-42e4-87ad-1fe7cba74dc6 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Lock "0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 72.324s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1602.848465] env[62510]: DEBUG oslo_vmware.api [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Task: {'id': task-1768742, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.918110] env[62510]: DEBUG nova.network.neutron [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Updating instance_info_cache with network_info: [{"id": "246fe90c-d755-46fd-a256-e1f26ac76e09", "address": "fa:16:3e:e3:ed:d9", "network": {"id": "5b3a4aa9-0608-4343-acbf-d9955b6ad433", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1316885967-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3400f6fc16e4c3f9f26232efec47435", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bd998416-f3d6-4a62-b828-5011063ce76a", "external-id": "nsx-vlan-transportzone-57", "segmentation_id": 57, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap246fe90c-d7", "ovs_interfaceid": "246fe90c-d755-46fd-a256-e1f26ac76e09", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1602.979081] env[62510]: DEBUG oslo_vmware.api [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1768745, 'name': RemoveSnapshot_Task, 'duration_secs': 0.39384} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.979081] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Deleted Snapshot of the VM instance {{(pid=62510) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1602.979081] env[62510]: DEBUG nova.compute.manager [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1602.979638] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49b1c104-3f02-49e2-ab81-42372a69abfc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.123911] env[62510]: DEBUG oslo_vmware.api [None req-9e2e639c-d178-4ed9-bfc0-9d2f6bc3fb79 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Task: {'id': task-1768744, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.177268] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ddef4bb-57aa-4509-879f-d72bf934492a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.188956] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8e48dc0-b516-4fc9-b18d-e29c7b08274d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.222805] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d328cb8-5273-4f6e-a875-550ad6358fc9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.231622] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-247ed8f3-3d03-450d-80e6-03dbffe30c33 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.248166] env[62510]: DEBUG nova.compute.provider_tree [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1603.310480] env[62510]: DEBUG nova.compute.manager [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1603.325281] env[62510]: DEBUG nova.compute.manager [req-728d8d3c-77a9-4b99-b3e7-16410777010f req-0bba120c-3deb-4d3c-8571-73133c893c3a service nova] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Received event network-vif-plugged-246fe90c-d755-46fd-a256-e1f26ac76e09 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1603.325508] env[62510]: DEBUG oslo_concurrency.lockutils [req-728d8d3c-77a9-4b99-b3e7-16410777010f req-0bba120c-3deb-4d3c-8571-73133c893c3a service nova] Acquiring lock "641628d1-bb6d-4207-89b9-98014328e028-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1603.325778] env[62510]: DEBUG oslo_concurrency.lockutils [req-728d8d3c-77a9-4b99-b3e7-16410777010f req-0bba120c-3deb-4d3c-8571-73133c893c3a service nova] Lock "641628d1-bb6d-4207-89b9-98014328e028-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1603.325884] env[62510]: DEBUG oslo_concurrency.lockutils [req-728d8d3c-77a9-4b99-b3e7-16410777010f req-0bba120c-3deb-4d3c-8571-73133c893c3a service nova] Lock "641628d1-bb6d-4207-89b9-98014328e028-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1603.327137] env[62510]: DEBUG nova.compute.manager [req-728d8d3c-77a9-4b99-b3e7-16410777010f req-0bba120c-3deb-4d3c-8571-73133c893c3a service nova] [instance: 641628d1-bb6d-4207-89b9-98014328e028] No waiting events found dispatching network-vif-plugged-246fe90c-d755-46fd-a256-e1f26ac76e09 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1603.327137] env[62510]: WARNING nova.compute.manager [req-728d8d3c-77a9-4b99-b3e7-16410777010f req-0bba120c-3deb-4d3c-8571-73133c893c3a service nova] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Received unexpected event network-vif-plugged-246fe90c-d755-46fd-a256-e1f26ac76e09 for instance with vm_state building and task_state spawning. [ 1603.327137] env[62510]: DEBUG nova.compute.manager [req-728d8d3c-77a9-4b99-b3e7-16410777010f req-0bba120c-3deb-4d3c-8571-73133c893c3a service nova] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Received event network-changed-246fe90c-d755-46fd-a256-e1f26ac76e09 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1603.327137] env[62510]: DEBUG nova.compute.manager [req-728d8d3c-77a9-4b99-b3e7-16410777010f req-0bba120c-3deb-4d3c-8571-73133c893c3a service nova] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Refreshing instance network info cache due to event network-changed-246fe90c-d755-46fd-a256-e1f26ac76e09. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1603.327137] env[62510]: DEBUG oslo_concurrency.lockutils [req-728d8d3c-77a9-4b99-b3e7-16410777010f req-0bba120c-3deb-4d3c-8571-73133c893c3a service nova] Acquiring lock "refresh_cache-641628d1-bb6d-4207-89b9-98014328e028" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1603.344733] env[62510]: DEBUG oslo_vmware.api [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Task: {'id': task-1768742, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.68389} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.344984] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 13cdba63-5db4-419f-9e0b-244832d7866b/13cdba63-5db4-419f-9e0b-244832d7866b.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1603.345212] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1603.345484] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a62df830-1d75-45e2-aeba-f2e93c3510ff {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.353447] env[62510]: DEBUG oslo_vmware.api [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Waiting for the task: (returnval){ [ 1603.353447] env[62510]: value = "task-1768746" [ 1603.353447] env[62510]: _type = "Task" [ 1603.353447] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.364781] env[62510]: DEBUG oslo_vmware.api [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Task: {'id': task-1768746, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.424452] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Releasing lock "refresh_cache-641628d1-bb6d-4207-89b9-98014328e028" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1603.424801] env[62510]: DEBUG nova.compute.manager [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Instance network_info: |[{"id": "246fe90c-d755-46fd-a256-e1f26ac76e09", "address": "fa:16:3e:e3:ed:d9", "network": {"id": "5b3a4aa9-0608-4343-acbf-d9955b6ad433", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1316885967-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3400f6fc16e4c3f9f26232efec47435", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bd998416-f3d6-4a62-b828-5011063ce76a", "external-id": "nsx-vlan-transportzone-57", "segmentation_id": 57, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap246fe90c-d7", "ovs_interfaceid": "246fe90c-d755-46fd-a256-e1f26ac76e09", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1603.425112] env[62510]: DEBUG oslo_concurrency.lockutils [req-728d8d3c-77a9-4b99-b3e7-16410777010f req-0bba120c-3deb-4d3c-8571-73133c893c3a service nova] Acquired lock "refresh_cache-641628d1-bb6d-4207-89b9-98014328e028" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1603.425355] env[62510]: DEBUG nova.network.neutron [req-728d8d3c-77a9-4b99-b3e7-16410777010f req-0bba120c-3deb-4d3c-8571-73133c893c3a service nova] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Refreshing network info cache for port 246fe90c-d755-46fd-a256-e1f26ac76e09 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1603.426610] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e3:ed:d9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bd998416-f3d6-4a62-b828-5011063ce76a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '246fe90c-d755-46fd-a256-e1f26ac76e09', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1603.434078] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1893303-4084-4151-b25b-0eb085a982df 
tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Creating folder: Project (d3400f6fc16e4c3f9f26232efec47435). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1603.434522] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-76da8ef4-2ca1-4b96-8f7c-5c19a908ffa0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.449284] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Created folder: Project (d3400f6fc16e4c3f9f26232efec47435) in parent group-v367197. [ 1603.449509] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Creating folder: Instances. Parent ref: group-v367342. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1603.449751] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e734dd2e-905f-4736-92ca-0b6a2b3687a5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.463014] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Created folder: Instances in parent group-v367342. [ 1603.463014] env[62510]: DEBUG oslo.service.loopingcall [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1603.463014] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1603.463014] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6b0d4a85-97e5-44a5-b639-f76877294eb6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.485503] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1603.485503] env[62510]: value = "task-1768749" [ 1603.485503] env[62510]: _type = "Task" [ 1603.485503] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.493976] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768749, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.497859] env[62510]: INFO nova.compute.manager [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Shelve offloading [ 1603.620910] env[62510]: DEBUG oslo_vmware.api [None req-9e2e639c-d178-4ed9-bfc0-9d2f6bc3fb79 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Task: {'id': task-1768744, 'name': PowerOffVM_Task, 'duration_secs': 1.033982} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.621212] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e2e639c-d178-4ed9-bfc0-9d2f6bc3fb79 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1603.621382] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9e2e639c-d178-4ed9-bfc0-9d2f6bc3fb79 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1603.621636] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4705da7c-9379-4d58-bdc8-73a962291124 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.650790] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9e2e639c-d178-4ed9-bfc0-9d2f6bc3fb79 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1603.651128] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9e2e639c-d178-4ed9-bfc0-9d2f6bc3fb79 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1603.651324] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e2e639c-d178-4ed9-bfc0-9d2f6bc3fb79 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Deleting the datastore file [datastore1] fa43a538-1aae-4642-8370-70f2a49ca92c {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1603.651646] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bd12182b-26f2-4cc0-b8e1-922b2ccead73 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.659619] env[62510]: DEBUG oslo_vmware.api [None req-9e2e639c-d178-4ed9-bfc0-9d2f6bc3fb79 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Waiting for the task: (returnval){ [ 1603.659619] env[62510]: value = "task-1768751" [ 1603.659619] env[62510]: _type = "Task" [ 1603.659619] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.668073] env[62510]: DEBUG oslo_vmware.api [None req-9e2e639c-d178-4ed9-bfc0-9d2f6bc3fb79 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Task: {'id': task-1768751, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.694014] env[62510]: DEBUG nova.compute.manager [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1603.719281] env[62510]: DEBUG nova.virt.hardware [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1603.719579] env[62510]: DEBUG nova.virt.hardware [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1603.719745] env[62510]: DEBUG nova.virt.hardware [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1603.719921] env[62510]: DEBUG nova.virt.hardware [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1603.720080] env[62510]: DEBUG nova.virt.hardware [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1603.720231] env[62510]: DEBUG nova.virt.hardware [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1603.720693] env[62510]: DEBUG nova.virt.hardware [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab 
tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1603.720917] env[62510]: DEBUG nova.virt.hardware [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1603.721149] env[62510]: DEBUG nova.virt.hardware [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1603.721362] env[62510]: DEBUG nova.virt.hardware [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1603.721468] env[62510]: DEBUG nova.virt.hardware [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1603.722353] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1c57b09-35c7-4d06-ab1b-2a737db2b4bf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.731476] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca2b5367-2f6a-420f-b6e2-247e80fe71b0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.752683] env[62510]: DEBUG nova.scheduler.client.report [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1603.838193] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1603.865898] env[62510]: DEBUG oslo_vmware.api [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Task: {'id': task-1768746, 'name': 
ExtendVirtualDisk_Task, 'duration_secs': 0.066063} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.866211] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1603.867112] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e50bc30-4ef7-4487-9a8e-5f807d982156 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.894502] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] 13cdba63-5db4-419f-9e0b-244832d7866b/13cdba63-5db4-419f-9e0b-244832d7866b.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1603.895133] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0d7962e1-7053-403d-a3a7-69b9ca2ff25d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.918157] env[62510]: DEBUG oslo_vmware.api [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Waiting for the task: (returnval){ [ 1603.918157] env[62510]: value = "task-1768752" [ 1603.918157] env[62510]: _type = "Task" [ 1603.918157] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.934420] env[62510]: DEBUG oslo_vmware.api [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Task: {'id': task-1768752, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.999630] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768749, 'name': CreateVM_Task, 'duration_secs': 0.467943} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.999861] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1604.000941] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1604.000941] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1604.001174] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1604.001712] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1604.002372] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0c801d8-97fa-4505-88c9-ee144eb6c004 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.004423] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fe271cbe-aab9-4945-b96f-7292eb13f001 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.012635] env[62510]: DEBUG oslo_vmware.api [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Waiting for the task: (returnval){ [ 1604.012635] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5290f5b9-9efb-8524-6995-594ba33fce80" [ 1604.012635] env[62510]: _type = "Task" [ 1604.012635] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1604.014805] env[62510]: DEBUG oslo_vmware.api [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1604.014805] env[62510]: value = "task-1768753" [ 1604.014805] env[62510]: _type = "Task" [ 1604.014805] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1604.037812] env[62510]: DEBUG oslo_vmware.api [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5290f5b9-9efb-8524-6995-594ba33fce80, 'name': SearchDatastore_Task, 'duration_secs': 0.01105} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1604.038562] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1604.038692] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1604.038956] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1604.040093] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1604.040093] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1604.040440] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] VM already powered off {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1604.040440] env[62510]: DEBUG nova.compute.manager [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1604.041078] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-a7a29c32-3f5a-4111-a988-e2d8a343707b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.043698] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b90e07c-3c4d-4d77-8cc0-49f4e650215d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.055731] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquiring lock "refresh_cache-83fa0d32-18ee-401d-af0b-a0adb538e5f4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1604.055930] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquired lock "refresh_cache-83fa0d32-18ee-401d-af0b-a0adb538e5f4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1604.056173] env[62510]: DEBUG nova.network.neutron [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1604.058887] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1604.059105] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1604.059893] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51c06822-08a9-4287-a78c-a3e19e79febf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.067029] env[62510]: DEBUG oslo_vmware.api [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Waiting for the task: (returnval){ [ 1604.067029] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52bc4218-236f-3351-b317-fbfa62e5228f" [ 1604.067029] env[62510]: _type = "Task" [ 1604.067029] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1604.077465] env[62510]: DEBUG oslo_vmware.api [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52bc4218-236f-3351-b317-fbfa62e5228f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.179168] env[62510]: DEBUG oslo_vmware.api [None req-9e2e639c-d178-4ed9-bfc0-9d2f6bc3fb79 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Task: {'id': task-1768751, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.110257} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1604.179909] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e2e639c-d178-4ed9-bfc0-9d2f6bc3fb79 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1604.179909] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9e2e639c-d178-4ed9-bfc0-9d2f6bc3fb79 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1604.180490] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9e2e639c-d178-4ed9-bfc0-9d2f6bc3fb79 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1604.180693] env[62510]: INFO nova.compute.manager [None req-9e2e639c-d178-4ed9-bfc0-9d2f6bc3fb79 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] Took 2.09 seconds to destroy the instance on the hypervisor. [ 1604.181066] env[62510]: DEBUG oslo.service.loopingcall [None req-9e2e639c-d178-4ed9-bfc0-9d2f6bc3fb79 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1604.181359] env[62510]: DEBUG nova.compute.manager [-] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1604.181499] env[62510]: DEBUG nova.network.neutron [-] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1604.257563] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.599s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1604.261691] env[62510]: INFO nova.compute.manager [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Migrating [ 1604.268612] env[62510]: DEBUG oslo_concurrency.lockutils [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.969s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1604.269662] env[62510]: INFO nova.compute.claims [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1604.298594] env[62510]: DEBUG nova.network.neutron [req-728d8d3c-77a9-4b99-b3e7-16410777010f req-0bba120c-3deb-4d3c-8571-73133c893c3a service nova] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Updated VIF entry in instance network info cache for port 246fe90c-d755-46fd-a256-e1f26ac76e09. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1604.299418] env[62510]: DEBUG nova.network.neutron [req-728d8d3c-77a9-4b99-b3e7-16410777010f req-0bba120c-3deb-4d3c-8571-73133c893c3a service nova] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Updating instance_info_cache with network_info: [{"id": "246fe90c-d755-46fd-a256-e1f26ac76e09", "address": "fa:16:3e:e3:ed:d9", "network": {"id": "5b3a4aa9-0608-4343-acbf-d9955b6ad433", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1316885967-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3400f6fc16e4c3f9f26232efec47435", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bd998416-f3d6-4a62-b828-5011063ce76a", "external-id": "nsx-vlan-transportzone-57", "segmentation_id": 57, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap246fe90c-d7", "ovs_interfaceid": "246fe90c-d755-46fd-a256-e1f26ac76e09", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1604.419508] env[62510]: DEBUG nova.network.neutron [-] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1604.431933] env[62510]: DEBUG oslo_vmware.api [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Task: {'id': task-1768752, 'name': ReconfigVM_Task, 'duration_secs': 0.263098} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1604.432256] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Reconfigured VM instance instance-00000036 to attach disk [datastore1] 13cdba63-5db4-419f-9e0b-244832d7866b/13cdba63-5db4-419f-9e0b-244832d7866b.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1604.433643] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e8290ab2-1e28-458f-b0b0-cdb99ce4df59 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.442753] env[62510]: DEBUG oslo_vmware.api [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Waiting for the task: (returnval){ [ 1604.442753] env[62510]: value = "task-1768754" [ 1604.442753] env[62510]: _type = "Task" [ 1604.442753] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1604.453932] env[62510]: DEBUG oslo_vmware.api [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Task: {'id': task-1768754, 'name': Rename_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.462977] env[62510]: DEBUG nova.network.neutron [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Successfully updated port: 6eb8d1eb-fc0d-41fd-a107-12a791bcd483 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1604.578957] env[62510]: DEBUG oslo_vmware.api [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52bc4218-236f-3351-b317-fbfa62e5228f, 'name': SearchDatastore_Task, 'duration_secs': 0.010012} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1604.579975] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f1604cd-f94d-4f3a-bf64-cb3c6efcbeaf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.587272] env[62510]: DEBUG oslo_vmware.api [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Waiting for the task: (returnval){ [ 1604.587272] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5253f27f-16cf-b739-f7f3-16093cbdaff5" [ 1604.587272] env[62510]: _type = "Task" [ 1604.587272] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1604.597852] env[62510]: DEBUG oslo_vmware.api [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5253f27f-16cf-b739-f7f3-16093cbdaff5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.786214] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquiring lock "refresh_cache-fae7e580-ab09-4fda-9cbe-0e066ddcb85c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1604.786404] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquired lock "refresh_cache-fae7e580-ab09-4fda-9cbe-0e066ddcb85c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1604.786587] env[62510]: DEBUG nova.network.neutron [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1604.801803] env[62510]: DEBUG oslo_concurrency.lockutils [req-728d8d3c-77a9-4b99-b3e7-16410777010f req-0bba120c-3deb-4d3c-8571-73133c893c3a service nova] Releasing lock "refresh_cache-641628d1-bb6d-4207-89b9-98014328e028" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1604.925229] env[62510]: DEBUG nova.network.neutron [-] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1604.955083] env[62510]: DEBUG oslo_vmware.api [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Task: {'id': task-1768754, 'name': Rename_Task, 'duration_secs': 0.159095} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1604.955619] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1604.955897] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2c382068-8e1b-4244-9aee-15109d1c67c2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.963491] env[62510]: DEBUG oslo_vmware.api [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Waiting for the task: (returnval){ [ 1604.963491] env[62510]: value = "task-1768755" [ 1604.963491] env[62510]: _type = "Task" [ 1604.963491] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1604.972449] env[62510]: DEBUG oslo_concurrency.lockutils [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "refresh_cache-b5ff2a10-3c76-469a-86e0-ed3b135bca37" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1604.972564] env[62510]: DEBUG oslo_concurrency.lockutils [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquired lock "refresh_cache-b5ff2a10-3c76-469a-86e0-ed3b135bca37" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1604.972711] env[62510]: DEBUG nova.network.neutron [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1604.974323] env[62510]: DEBUG oslo_vmware.api [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Task: {'id': task-1768755, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.099826] env[62510]: DEBUG oslo_vmware.api [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5253f27f-16cf-b739-f7f3-16093cbdaff5, 'name': SearchDatastore_Task, 'duration_secs': 0.00931} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1605.100957] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1605.102606] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 641628d1-bb6d-4207-89b9-98014328e028/641628d1-bb6d-4207-89b9-98014328e028.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1605.102931] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-20567d01-ac1c-48c0-b0d3-da7a723d0bf6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.110906] env[62510]: DEBUG oslo_vmware.api [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Waiting for the task: (returnval){ [ 1605.110906] env[62510]: value = "task-1768756" [ 1605.110906] env[62510]: _type = "Task" [ 1605.110906] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1605.122701] env[62510]: DEBUG oslo_vmware.api [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Task: {'id': task-1768756, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.182040] env[62510]: DEBUG nova.network.neutron [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Updating instance_info_cache with network_info: [{"id": "d6ee81d1-3abc-4d5e-a8ca-658407cbd553", "address": "fa:16:3e:45:ab:75", "network": {"id": "3958d418-1b64-4598-975c-02b13c976ce5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1692593298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.217", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3109fa7889c64dfda2117d4cd58aa528", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ba56987-9dc3-4c76-a4e2-942b05355bdb", "external-id": "nsx-vlan-transportzone-698", "segmentation_id": 698, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6ee81d1-3a", "ovs_interfaceid": "d6ee81d1-3abc-4d5e-a8ca-658407cbd553", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1605.369230] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Acquiring lock "d1c20183-ba24-4a11-ad82-bf240d581322" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1605.369230] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Lock "d1c20183-ba24-4a11-ad82-bf240d581322" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1605.429286] env[62510]: INFO nova.compute.manager [-] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] Took 1.25 seconds to deallocate network for instance. [ 1605.482215] env[62510]: DEBUG oslo_vmware.api [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Task: {'id': task-1768755, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.515403] env[62510]: DEBUG nova.compute.manager [req-3e2ffdbc-756b-49c3-a350-635f80313499 req-5dcc171f-6f01-40fa-a6b5-b1f8b8ab9882 service nova] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Received event network-vif-plugged-6eb8d1eb-fc0d-41fd-a107-12a791bcd483 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1605.516365] env[62510]: DEBUG oslo_concurrency.lockutils [req-3e2ffdbc-756b-49c3-a350-635f80313499 req-5dcc171f-6f01-40fa-a6b5-b1f8b8ab9882 service nova] Acquiring lock "b5ff2a10-3c76-469a-86e0-ed3b135bca37-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1605.516365] env[62510]: DEBUG oslo_concurrency.lockutils [req-3e2ffdbc-756b-49c3-a350-635f80313499 req-5dcc171f-6f01-40fa-a6b5-b1f8b8ab9882 service nova] Lock "b5ff2a10-3c76-469a-86e0-ed3b135bca37-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1605.516365] env[62510]: DEBUG oslo_concurrency.lockutils [req-3e2ffdbc-756b-49c3-a350-635f80313499 req-5dcc171f-6f01-40fa-a6b5-b1f8b8ab9882 service nova] Lock "b5ff2a10-3c76-469a-86e0-ed3b135bca37-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1605.516365] env[62510]: DEBUG nova.compute.manager [req-3e2ffdbc-756b-49c3-a350-635f80313499 req-5dcc171f-6f01-40fa-a6b5-b1f8b8ab9882 service nova] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] No waiting events found dispatching network-vif-plugged-6eb8d1eb-fc0d-41fd-a107-12a791bcd483 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1605.516648] env[62510]: WARNING nova.compute.manager [req-3e2ffdbc-756b-49c3-a350-635f80313499 req-5dcc171f-6f01-40fa-a6b5-b1f8b8ab9882 service nova] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Received unexpected event network-vif-plugged-6eb8d1eb-fc0d-41fd-a107-12a791bcd483 for instance with vm_state building and task_state spawning. [ 1605.516648] env[62510]: DEBUG nova.compute.manager [req-3e2ffdbc-756b-49c3-a350-635f80313499 req-5dcc171f-6f01-40fa-a6b5-b1f8b8ab9882 service nova] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Received event network-changed-6eb8d1eb-fc0d-41fd-a107-12a791bcd483 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1605.516717] env[62510]: DEBUG nova.compute.manager [req-3e2ffdbc-756b-49c3-a350-635f80313499 req-5dcc171f-6f01-40fa-a6b5-b1f8b8ab9882 service nova] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Refreshing instance network info cache due to event network-changed-6eb8d1eb-fc0d-41fd-a107-12a791bcd483. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1605.517104] env[62510]: DEBUG oslo_concurrency.lockutils [req-3e2ffdbc-756b-49c3-a350-635f80313499 req-5dcc171f-6f01-40fa-a6b5-b1f8b8ab9882 service nova] Acquiring lock "refresh_cache-b5ff2a10-3c76-469a-86e0-ed3b135bca37" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1605.557435] env[62510]: DEBUG nova.network.neutron [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1605.624898] env[62510]: DEBUG oslo_vmware.api [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Task: {'id': task-1768756, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.686849] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Releasing lock "refresh_cache-83fa0d32-18ee-401d-af0b-a0adb538e5f4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1605.794494] env[62510]: DEBUG nova.network.neutron [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Updating instance_info_cache with network_info: [{"id": "21d515b9-b00f-45cc-9437-318ee6bba755", "address": "fa:16:3e:66:30:96", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.170", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21d515b9-b0", "ovs_interfaceid": "21d515b9-b00f-45cc-9437-318ee6bba755", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1605.878885] env[62510]: DEBUG nova.network.neutron [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Updating instance_info_cache with network_info: [{"id": "6eb8d1eb-fc0d-41fd-a107-12a791bcd483", "address": "fa:16:3e:ac:5c:6e", "network": {"id": "22bd7136-e6e5-445f-8cd0-6cfe0341410c", "bridge": "br-int", "label": 
"tempest-ServersTestJSON-2034430291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "94a46473611d4b22be7c66c909d1b348", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6eb8d1eb-fc", "ovs_interfaceid": "6eb8d1eb-fc0d-41fd-a107-12a791bcd483", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1605.918295] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7169beba-80ed-4102-b742-9c63ea2698c2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.927159] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5842347-e424-4598-846e-e3f8b0dd31ad {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.962108] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9e2e639c-d178-4ed9-bfc0-9d2f6bc3fb79 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1605.963107] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6f01ee2-60e8-4680-9d3f-f13be53f8136 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.973942] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89ed472e-cbd4-4975-a5ee-bff61dd5e772 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.983229] env[62510]: DEBUG oslo_vmware.api [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Task: {'id': task-1768755, 'name': PowerOnVM_Task, 'duration_secs': 0.834392} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1605.983875] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1605.984103] env[62510]: INFO nova.compute.manager [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Took 10.63 seconds to spawn the instance on the hypervisor. [ 1605.984312] env[62510]: DEBUG nova.compute.manager [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1605.985063] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cedc456c-10e7-4e92-8b69-c9a8c0f89191 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.995449] env[62510]: DEBUG nova.compute.provider_tree [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1606.124989] env[62510]: DEBUG oslo_vmware.api [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Task: {'id': task-1768756, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.522989} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1606.125592] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 641628d1-bb6d-4207-89b9-98014328e028/641628d1-bb6d-4207-89b9-98014328e028.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1606.125721] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1606.125943] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7ad0a31b-e3f6-4813-b752-1eb39d294380 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.133274] env[62510]: DEBUG oslo_vmware.api [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Waiting for the task: (returnval){ [ 1606.133274] env[62510]: value = "task-1768757" [ 1606.133274] env[62510]: _type = "Task" [ 1606.133274] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1606.142902] env[62510]: DEBUG oslo_vmware.api [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Task: {'id': task-1768757, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.298579] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Releasing lock "refresh_cache-fae7e580-ab09-4fda-9cbe-0e066ddcb85c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1606.347172] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1606.347172] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c0cbeeb-e259-4269-8bce-feed2ba10305 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.354755] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1606.355028] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8cc8249e-3012-455a-a9f5-0e37f3ab96f2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.382673] env[62510]: DEBUG oslo_concurrency.lockutils [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Releasing lock "refresh_cache-b5ff2a10-3c76-469a-86e0-ed3b135bca37" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1606.383135] env[62510]: DEBUG nova.compute.manager [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Instance network_info: |[{"id": "6eb8d1eb-fc0d-41fd-a107-12a791bcd483", "address": "fa:16:3e:ac:5c:6e", "network": {"id": "22bd7136-e6e5-445f-8cd0-6cfe0341410c", "bridge": "br-int", "label": "tempest-ServersTestJSON-2034430291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "94a46473611d4b22be7c66c909d1b348", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6eb8d1eb-fc", "ovs_interfaceid": "6eb8d1eb-fc0d-41fd-a107-12a791bcd483", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1606.383991] env[62510]: DEBUG oslo_concurrency.lockutils [req-3e2ffdbc-756b-49c3-a350-635f80313499 req-5dcc171f-6f01-40fa-a6b5-b1f8b8ab9882 service nova] Acquired lock "refresh_cache-b5ff2a10-3c76-469a-86e0-ed3b135bca37" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1606.384194] env[62510]: DEBUG nova.network.neutron [req-3e2ffdbc-756b-49c3-a350-635f80313499 req-5dcc171f-6f01-40fa-a6b5-b1f8b8ab9882 service nova] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Refreshing network info cache for port 6eb8d1eb-fc0d-41fd-a107-12a791bcd483 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1606.385952] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ac:5c:6e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89ef02af-c508-432f-ae29-3a219701d584', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6eb8d1eb-fc0d-41fd-a107-12a791bcd483', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1606.396350] env[62510]: DEBUG oslo.service.loopingcall [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1606.396662] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1606.397645] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0d3100bf-0f57-4b0f-bed3-53fed9fa7618 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.423159] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1606.423159] env[62510]: value = "task-1768759" [ 1606.423159] env[62510]: _type = "Task" [ 1606.423159] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1606.432794] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768759, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.438879] env[62510]: DEBUG nova.compute.manager [req-e577c493-9b24-474f-92de-78874f8dbe86 req-59eb6a76-8028-4a9a-a099-ca7d375764a4 service nova] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Received event network-vif-unplugged-d6ee81d1-3abc-4d5e-a8ca-658407cbd553 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1606.439135] env[62510]: DEBUG oslo_concurrency.lockutils [req-e577c493-9b24-474f-92de-78874f8dbe86 req-59eb6a76-8028-4a9a-a099-ca7d375764a4 service nova] Acquiring lock "83fa0d32-18ee-401d-af0b-a0adb538e5f4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1606.439357] env[62510]: DEBUG oslo_concurrency.lockutils [req-e577c493-9b24-474f-92de-78874f8dbe86 req-59eb6a76-8028-4a9a-a099-ca7d375764a4 service nova] Lock "83fa0d32-18ee-401d-af0b-a0adb538e5f4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1606.439543] env[62510]: DEBUG oslo_concurrency.lockutils [req-e577c493-9b24-474f-92de-78874f8dbe86 req-59eb6a76-8028-4a9a-a099-ca7d375764a4 service nova] Lock "83fa0d32-18ee-401d-af0b-a0adb538e5f4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1606.440583] env[62510]: DEBUG nova.compute.manager [req-e577c493-9b24-474f-92de-78874f8dbe86 req-59eb6a76-8028-4a9a-a099-ca7d375764a4 service nova] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] No waiting events found dispatching network-vif-unplugged-d6ee81d1-3abc-4d5e-a8ca-658407cbd553 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1606.440583] env[62510]: WARNING nova.compute.manager [req-e577c493-9b24-474f-92de-78874f8dbe86 req-59eb6a76-8028-4a9a-a099-ca7d375764a4 service nova] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Received unexpected event network-vif-unplugged-d6ee81d1-3abc-4d5e-a8ca-658407cbd553 for instance with vm_state shelved and task_state shelving_offloading. 
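The task lifecycle repeated throughout these entries — invoke a vSphere method such as Rename_Task, PowerOnVM_Task, CopyVirtualDisk_Task or DeleteDatastoreFile_Task, receive a task reference back, then poll it until the "progress is N%." lines give way to "completed successfully." — follows the standard vCenter task-polling pattern driven by wait_for_task/_poll_task in the log. The sketch below is a minimal, hypothetical illustration of that loop only, not the oslo.vmware or Nova implementation; get_task_info, TaskFailed and poll_interval are assumed names introduced for the example.

    import time


    class TaskFailed(Exception):
        """Raised when a vCenter task ends in the 'error' state (hypothetical)."""


    def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
        """Poll a vCenter task until it finishes, mirroring the log pattern above.

        get_task_info is an assumed callable returning an object with
        ``state`` ('running', 'success' or 'error') and ``progress`` (0-100),
        standing in for a PropertyCollector read of the Task managed object.
        """
        while True:
            info = get_task_info(task_ref)
            if info.state == 'success':
                # corresponds to the "... completed successfully." entries above
                return info
            if info.state == 'error':
                raise TaskFailed("task %s failed: %s" % (task_ref, info.error))
            # corresponds to the "... progress is N%." entries above
            print("Task %s progress is %s%%." % (task_ref, info.progress))
            time.sleep(poll_interval)

In the log, the same loop is what produces the interleaved "Waiting for the task: (returnval){ value = "task-NNNNNNN" ... }" blocks followed by the periodic progress reports for each task id.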
[ 1606.493955] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1606.494263] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1606.494499] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Deleting the datastore file [datastore1] 83fa0d32-18ee-401d-af0b-a0adb538e5f4 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1606.494735] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7d9aeb14-b8a0-4567-895d-fb62c2ac8de3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.500022] env[62510]: DEBUG nova.scheduler.client.report [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1606.504807] env[62510]: DEBUG oslo_vmware.api [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1606.504807] env[62510]: value = "task-1768760" [ 1606.504807] env[62510]: _type = "Task" [ 1606.504807] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1606.514757] env[62510]: INFO nova.compute.manager [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Took 44.67 seconds to build instance. [ 1606.525918] env[62510]: DEBUG oslo_vmware.api [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1768760, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.644241] env[62510]: DEBUG oslo_vmware.api [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Task: {'id': task-1768757, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.251394} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1606.644577] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1606.645708] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d5661ed-a318-49b8-84e9-2287cf2345e2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.676913] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] 641628d1-bb6d-4207-89b9-98014328e028/641628d1-bb6d-4207-89b9-98014328e028.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1606.677914] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-67ce5573-10c3-4145-a2a3-95d3382c182c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.703315] env[62510]: DEBUG oslo_vmware.api [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Waiting for the task: (returnval){ [ 1606.703315] env[62510]: value = "task-1768761" [ 1606.703315] env[62510]: _type = "Task" [ 1606.703315] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1606.714294] env[62510]: DEBUG oslo_vmware.api [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Task: {'id': task-1768761, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.936370] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768759, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.006226] env[62510]: DEBUG oslo_concurrency.lockutils [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.738s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1607.008986] env[62510]: DEBUG nova.compute.manager [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1607.010370] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.199s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1607.011915] env[62510]: INFO nova.compute.claims [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1607.028037] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0813f1ef-509b-44d0-a3ad-6116ad805bef tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Lock "13cdba63-5db4-419f-9e0b-244832d7866b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.889s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1607.032822] env[62510]: DEBUG oslo_vmware.api [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1768760, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.349692} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1607.036767] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1607.036767] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1607.036955] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1607.079158] env[62510]: INFO nova.scheduler.client.report [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Deleted allocations for instance 83fa0d32-18ee-401d-af0b-a0adb538e5f4 [ 1607.213462] env[62510]: DEBUG oslo_vmware.api [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Task: {'id': task-1768761, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.340283] env[62510]: DEBUG nova.network.neutron [req-3e2ffdbc-756b-49c3-a350-635f80313499 req-5dcc171f-6f01-40fa-a6b5-b1f8b8ab9882 service nova] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Updated VIF entry in instance network info cache for port 6eb8d1eb-fc0d-41fd-a107-12a791bcd483. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1607.341241] env[62510]: DEBUG nova.network.neutron [req-3e2ffdbc-756b-49c3-a350-635f80313499 req-5dcc171f-6f01-40fa-a6b5-b1f8b8ab9882 service nova] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Updating instance_info_cache with network_info: [{"id": "6eb8d1eb-fc0d-41fd-a107-12a791bcd483", "address": "fa:16:3e:ac:5c:6e", "network": {"id": "22bd7136-e6e5-445f-8cd0-6cfe0341410c", "bridge": "br-int", "label": "tempest-ServersTestJSON-2034430291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "94a46473611d4b22be7c66c909d1b348", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6eb8d1eb-fc", "ovs_interfaceid": "6eb8d1eb-fc0d-41fd-a107-12a791bcd483", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1607.438352] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768759, 'name': CreateVM_Task, 'duration_secs': 0.754763} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1607.438352] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1607.441433] env[62510]: DEBUG oslo_concurrency.lockutils [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1607.441433] env[62510]: DEBUG oslo_concurrency.lockutils [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1607.441433] env[62510]: DEBUG oslo_concurrency.lockutils [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1607.442967] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f49503b3-ff94-4e69-9f38-061f88c6826c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.450660] 
env[62510]: DEBUG oslo_vmware.api [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1607.450660] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5200cdc8-2bf1-b90c-1d79-47246c6972b5" [ 1607.450660] env[62510]: _type = "Task" [ 1607.450660] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1607.460486] env[62510]: DEBUG oslo_vmware.api [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5200cdc8-2bf1-b90c-1d79-47246c6972b5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.528456] env[62510]: DEBUG nova.compute.utils [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1607.528456] env[62510]: DEBUG nova.compute.manager [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1607.528456] env[62510]: DEBUG nova.network.neutron [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1607.530714] env[62510]: DEBUG nova.compute.manager [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1607.584257] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1607.618087] env[62510]: DEBUG nova.policy [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e0b465ab9caf4d989219f1fbbebd00ce', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd98518565b744451ba90ba301267213f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1607.713400] env[62510]: DEBUG oslo_vmware.api [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Task: {'id': task-1768761, 'name': ReconfigVM_Task, 'duration_secs': 0.756499} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1607.713687] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Reconfigured VM instance instance-00000037 to attach disk [datastore1] 641628d1-bb6d-4207-89b9-98014328e028/641628d1-bb6d-4207-89b9-98014328e028.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1607.714343] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3403eb3f-1279-4388-ad33-459a95345549 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.724242] env[62510]: DEBUG oslo_vmware.api [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Waiting for the task: (returnval){ [ 1607.724242] env[62510]: value = "task-1768762" [ 1607.724242] env[62510]: _type = "Task" [ 1607.724242] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1607.733976] env[62510]: DEBUG oslo_vmware.api [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Task: {'id': task-1768762, 'name': Rename_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.815095] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74ac21bc-0076-41e3-8c7d-265164d9abcf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.836107] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Updating instance 'fae7e580-ab09-4fda-9cbe-0e066ddcb85c' progress to 0 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1607.843178] env[62510]: DEBUG oslo_concurrency.lockutils [req-3e2ffdbc-756b-49c3-a350-635f80313499 req-5dcc171f-6f01-40fa-a6b5-b1f8b8ab9882 service nova] Releasing lock "refresh_cache-b5ff2a10-3c76-469a-86e0-ed3b135bca37" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1607.843510] env[62510]: DEBUG nova.compute.manager [req-3e2ffdbc-756b-49c3-a350-635f80313499 req-5dcc171f-6f01-40fa-a6b5-b1f8b8ab9882 service nova] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Received event network-changed-9a53a8d4-8b7d-4167-b888-f20b2fce23c5 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1607.843728] env[62510]: DEBUG nova.compute.manager [req-3e2ffdbc-756b-49c3-a350-635f80313499 req-5dcc171f-6f01-40fa-a6b5-b1f8b8ab9882 service nova] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Refreshing instance network info cache due to event network-changed-9a53a8d4-8b7d-4167-b888-f20b2fce23c5. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1607.843974] env[62510]: DEBUG oslo_concurrency.lockutils [req-3e2ffdbc-756b-49c3-a350-635f80313499 req-5dcc171f-6f01-40fa-a6b5-b1f8b8ab9882 service nova] Acquiring lock "refresh_cache-0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1607.844217] env[62510]: DEBUG oslo_concurrency.lockutils [req-3e2ffdbc-756b-49c3-a350-635f80313499 req-5dcc171f-6f01-40fa-a6b5-b1f8b8ab9882 service nova] Acquired lock "refresh_cache-0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1607.844437] env[62510]: DEBUG nova.network.neutron [req-3e2ffdbc-756b-49c3-a350-635f80313499 req-5dcc171f-6f01-40fa-a6b5-b1f8b8ab9882 service nova] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Refreshing network info cache for port 9a53a8d4-8b7d-4167-b888-f20b2fce23c5 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1607.962534] env[62510]: DEBUG oslo_vmware.api [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5200cdc8-2bf1-b90c-1d79-47246c6972b5, 'name': SearchDatastore_Task, 'duration_secs': 0.010207} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1607.962874] env[62510]: DEBUG oslo_concurrency.lockutils [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1607.963129] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1607.963376] env[62510]: DEBUG oslo_concurrency.lockutils [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1607.963538] env[62510]: DEBUG oslo_concurrency.lockutils [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1607.963710] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1607.963970] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b0ae0f59-6ed1-4f96-be0e-14a549ddda34 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.974683] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1607.974908] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1607.975851] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bfb25995-7411-4bc0-84c8-c01fd24441b7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.983359] env[62510]: DEBUG oslo_vmware.api [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1607.983359] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]525360b4-0f09-b490-af4c-3963ed828211" [ 1607.983359] env[62510]: _type = "Task" [ 1607.983359] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1607.991909] env[62510]: DEBUG oslo_vmware.api [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]525360b4-0f09-b490-af4c-3963ed828211, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.018685] env[62510]: DEBUG nova.network.neutron [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Successfully created port: 28687f38-1a1a-40ad-ad64-c571d7a7dbe3 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1608.036206] env[62510]: DEBUG nova.compute.manager [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1608.053805] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1608.235857] env[62510]: DEBUG oslo_vmware.api [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Task: {'id': task-1768762, 'name': Rename_Task, 'duration_secs': 0.322626} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1608.238558] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1608.239124] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c07538a0-2cf7-4c64-a5ae-fea8bbc42165 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.246991] env[62510]: DEBUG oslo_vmware.api [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Waiting for the task: (returnval){ [ 1608.246991] env[62510]: value = "task-1768763" [ 1608.246991] env[62510]: _type = "Task" [ 1608.246991] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.260546] env[62510]: DEBUG oslo_vmware.api [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Task: {'id': task-1768763, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.345784] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1608.345784] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-26211e53-86d4-4ed2-9df2-64a6f92d5824 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.354021] env[62510]: DEBUG oslo_vmware.api [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Waiting for the task: (returnval){ [ 1608.354021] env[62510]: value = "task-1768764" [ 1608.354021] env[62510]: _type = "Task" [ 1608.354021] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.368652] env[62510]: DEBUG oslo_vmware.api [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768764, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.500354] env[62510]: DEBUG oslo_vmware.api [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]525360b4-0f09-b490-af4c-3963ed828211, 'name': SearchDatastore_Task, 'duration_secs': 0.0107} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1608.501607] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75a53066-82c2-4e78-a34d-192af70a4436 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.512830] env[62510]: DEBUG oslo_vmware.api [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1608.512830] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52feca87-ac58-e7ed-ae6a-54a2997e72ea" [ 1608.512830] env[62510]: _type = "Task" [ 1608.512830] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.529794] env[62510]: DEBUG oslo_vmware.api [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52feca87-ac58-e7ed-ae6a-54a2997e72ea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.554630] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-231d8f13-5b1a-433e-b1cd-44678b767821 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.564328] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9616e7d-6425-4963-9665-32d03b0bd629 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.605420] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83328e97-fc5b-49b0-9146-6c9e8c73f471 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.617206] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-861ae1e1-183c-4b06-bb6f-44f7b3d8d42f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.633990] env[62510]: DEBUG nova.compute.provider_tree [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1608.694161] env[62510]: DEBUG nova.network.neutron [req-3e2ffdbc-756b-49c3-a350-635f80313499 req-5dcc171f-6f01-40fa-a6b5-b1f8b8ab9882 service nova] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Updated VIF entry in instance network info cache for port 9a53a8d4-8b7d-4167-b888-f20b2fce23c5. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1608.694447] env[62510]: DEBUG nova.network.neutron [req-3e2ffdbc-756b-49c3-a350-635f80313499 req-5dcc171f-6f01-40fa-a6b5-b1f8b8ab9882 service nova] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Updating instance_info_cache with network_info: [{"id": "9a53a8d4-8b7d-4167-b888-f20b2fce23c5", "address": "fa:16:3e:17:66:3e", "network": {"id": "00c0e43c-22e9-4b57-9337-f13ecd10f244", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1255004128-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "98d6c12eccf74757b3cbc2c8acddeb19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27abaf31-0f39-428c-a8d3-cd7548de6818", "external-id": "nsx-vlan-transportzone-505", "segmentation_id": 505, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a53a8d4-8b", "ovs_interfaceid": "9a53a8d4-8b7d-4167-b888-f20b2fce23c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1608.733800] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ec5dec1a-c502-43da-b7af-111abc37dd6c tempest-ServersListShow296Test-310805575 tempest-ServersListShow296Test-310805575-project-member] Acquiring lock "1f0ab639-bfcb-48eb-a079-ea07dd627c2f" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1608.733800] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ec5dec1a-c502-43da-b7af-111abc37dd6c tempest-ServersListShow296Test-310805575 tempest-ServersListShow296Test-310805575-project-member] Lock "1f0ab639-bfcb-48eb-a079-ea07dd627c2f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1608.758651] env[62510]: DEBUG oslo_vmware.api [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Task: {'id': task-1768763, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.862975] env[62510]: DEBUG oslo_vmware.api [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768764, 'name': PowerOffVM_Task, 'duration_secs': 0.203523} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1608.863246] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1608.863382] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Updating instance 'fae7e580-ab09-4fda-9cbe-0e066ddcb85c' progress to 17 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1609.025948] env[62510]: DEBUG oslo_vmware.api [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52feca87-ac58-e7ed-ae6a-54a2997e72ea, 'name': SearchDatastore_Task, 'duration_secs': 0.032115} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.026269] env[62510]: DEBUG oslo_concurrency.lockutils [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1609.026488] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] b5ff2a10-3c76-469a-86e0-ed3b135bca37/b5ff2a10-3c76-469a-86e0-ed3b135bca37.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1609.026783] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-018cbd86-53d6-45f7-b290-758213e9dbe6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.034390] env[62510]: DEBUG oslo_vmware.api [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1609.034390] env[62510]: value = "task-1768765" [ 1609.034390] env[62510]: _type = "Task" [ 1609.034390] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.042985] env[62510]: DEBUG oslo_vmware.api [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768765, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.045096] env[62510]: DEBUG nova.compute.manager [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1609.074981] env[62510]: DEBUG nova.virt.hardware [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1609.075297] env[62510]: DEBUG nova.virt.hardware [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1609.075606] env[62510]: DEBUG nova.virt.hardware [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1609.075856] env[62510]: DEBUG nova.virt.hardware [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1609.076162] env[62510]: DEBUG nova.virt.hardware [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1609.076254] env[62510]: DEBUG nova.virt.hardware [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1609.076491] env[62510]: DEBUG nova.virt.hardware [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1609.076706] env[62510]: DEBUG nova.virt.hardware [None req-59a9f140-7131-4c51-8844-4e80906b6466 
tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1609.076901] env[62510]: DEBUG nova.virt.hardware [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1609.077100] env[62510]: DEBUG nova.virt.hardware [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1609.077336] env[62510]: DEBUG nova.virt.hardware [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1609.078385] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d7aee2e-f4e0-4e25-8eba-30fbf7a4940e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.087183] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b186dd0-d1ad-40c5-bc71-3a1122ece98e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.138151] env[62510]: DEBUG nova.scheduler.client.report [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1609.200628] env[62510]: DEBUG oslo_concurrency.lockutils [req-3e2ffdbc-756b-49c3-a350-635f80313499 req-5dcc171f-6f01-40fa-a6b5-b1f8b8ab9882 service nova] Releasing lock "refresh_cache-0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1609.262875] env[62510]: DEBUG oslo_vmware.api [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Task: {'id': task-1768763, 'name': PowerOnVM_Task, 'duration_secs': 0.599211} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.263338] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1609.263575] env[62510]: INFO nova.compute.manager [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Took 8.27 seconds to spawn the instance on the hypervisor. [ 1609.263842] env[62510]: DEBUG nova.compute.manager [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1609.265035] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a32f461-0065-4a4b-b2e8-be60dcfc628a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.373607] env[62510]: DEBUG nova.virt.hardware [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:41Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1609.373943] env[62510]: DEBUG nova.virt.hardware [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1609.374362] env[62510]: DEBUG nova.virt.hardware [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1609.374460] env[62510]: DEBUG nova.virt.hardware [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1609.374631] env[62510]: DEBUG nova.virt.hardware [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1609.374853] env[62510]: DEBUG 
nova.virt.hardware [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1609.375219] env[62510]: DEBUG nova.virt.hardware [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1609.375316] env[62510]: DEBUG nova.virt.hardware [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1609.376843] env[62510]: DEBUG nova.virt.hardware [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1609.377187] env[62510]: DEBUG nova.virt.hardware [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1609.377539] env[62510]: DEBUG nova.virt.hardware [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1609.389760] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aca106d5-771a-4932-9560-d0accbf48422 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.421784] env[62510]: DEBUG oslo_vmware.api [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Waiting for the task: (returnval){ [ 1609.421784] env[62510]: value = "task-1768766" [ 1609.421784] env[62510]: _type = "Task" [ 1609.421784] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.438620] env[62510]: DEBUG oslo_vmware.api [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768766, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.545454] env[62510]: DEBUG oslo_vmware.api [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768765, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.622395] env[62510]: DEBUG nova.compute.manager [req-45725f71-d40d-4c90-9edb-1f0ab3a12d7f req-e4fb707b-21e8-4dbc-a18b-d9f297b25355 service nova] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Received event network-changed-d6ee81d1-3abc-4d5e-a8ca-658407cbd553 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1609.622715] env[62510]: DEBUG nova.compute.manager [req-45725f71-d40d-4c90-9edb-1f0ab3a12d7f req-e4fb707b-21e8-4dbc-a18b-d9f297b25355 service nova] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Refreshing instance network info cache due to event network-changed-d6ee81d1-3abc-4d5e-a8ca-658407cbd553. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1609.623066] env[62510]: DEBUG oslo_concurrency.lockutils [req-45725f71-d40d-4c90-9edb-1f0ab3a12d7f req-e4fb707b-21e8-4dbc-a18b-d9f297b25355 service nova] Acquiring lock "refresh_cache-83fa0d32-18ee-401d-af0b-a0adb538e5f4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1609.623316] env[62510]: DEBUG oslo_concurrency.lockutils [req-45725f71-d40d-4c90-9edb-1f0ab3a12d7f req-e4fb707b-21e8-4dbc-a18b-d9f297b25355 service nova] Acquired lock "refresh_cache-83fa0d32-18ee-401d-af0b-a0adb538e5f4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1609.623588] env[62510]: DEBUG nova.network.neutron [req-45725f71-d40d-4c90-9edb-1f0ab3a12d7f req-e4fb707b-21e8-4dbc-a18b-d9f297b25355 service nova] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Refreshing network info cache for port d6ee81d1-3abc-4d5e-a8ca-658407cbd553 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1609.645016] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.633s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1609.645016] env[62510]: DEBUG nova.compute.manager [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1609.646538] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.909s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1609.648912] env[62510]: INFO nova.compute.claims [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1609.788913] env[62510]: INFO nova.compute.manager [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Took 46.85 seconds to build instance. [ 1609.933724] env[62510]: DEBUG oslo_vmware.api [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768766, 'name': ReconfigVM_Task, 'duration_secs': 0.399705} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.934143] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Updating instance 'fae7e580-ab09-4fda-9cbe-0e066ddcb85c' progress to 33 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1610.047835] env[62510]: DEBUG oslo_vmware.api [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768765, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.535735} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.048147] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] b5ff2a10-3c76-469a-86e0-ed3b135bca37/b5ff2a10-3c76-469a-86e0-ed3b135bca37.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1610.048246] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1610.048719] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eb415eda-12a3-412f-8180-7d25964f1e70 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.056369] env[62510]: DEBUG oslo_vmware.api [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1610.056369] env[62510]: value = "task-1768767" [ 1610.056369] env[62510]: _type = "Task" [ 1610.056369] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.065427] env[62510]: DEBUG oslo_vmware.api [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768767, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.156307] env[62510]: DEBUG nova.compute.utils [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1610.162577] env[62510]: DEBUG nova.compute.manager [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1610.162768] env[62510]: DEBUG nova.network.neutron [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1610.239066] env[62510]: DEBUG nova.policy [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '635c1339dcc74d98adf84fbf48042083', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e39188afd4e94f01a5b3f1ec78cf70e4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1610.294577] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d1893303-4084-4151-b25b-0eb085a982df tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Lock "641628d1-bb6d-4207-89b9-98014328e028" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 65.310s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1610.364349] env[62510]: DEBUG nova.network.neutron [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Successfully updated port: 28687f38-1a1a-40ad-ad64-c571d7a7dbe3 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1610.441498] env[62510]: DEBUG nova.virt.hardware [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:36:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='c7683c68-4a26-4844-9915-d8d489d9d625',id=26,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1203151111',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1610.441750] env[62510]: DEBUG nova.virt.hardware [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1610.441909] env[62510]: DEBUG nova.virt.hardware [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Image limits 0:0:0 {{(pid=62510) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1610.442323] env[62510]: DEBUG nova.virt.hardware [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1610.442528] env[62510]: DEBUG nova.virt.hardware [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1610.442681] env[62510]: DEBUG nova.virt.hardware [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1610.442887] env[62510]: DEBUG nova.virt.hardware [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1610.443061] env[62510]: DEBUG nova.virt.hardware [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1610.443235] env[62510]: DEBUG nova.virt.hardware [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1610.443412] env[62510]: DEBUG nova.virt.hardware [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1610.443607] env[62510]: DEBUG nova.virt.hardware [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1610.450958] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Reconfiguring VM instance instance-0000002e to detach disk 2000 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1610.453892] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a80286b6-ae4f-4c4a-80ff-e8b9da6c04fc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.481518] env[62510]: DEBUG oslo_vmware.api [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 
tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Waiting for the task: (returnval){ [ 1610.481518] env[62510]: value = "task-1768768" [ 1610.481518] env[62510]: _type = "Task" [ 1610.481518] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.492564] env[62510]: DEBUG oslo_vmware.api [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768768, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.543698] env[62510]: DEBUG nova.network.neutron [req-45725f71-d40d-4c90-9edb-1f0ab3a12d7f req-e4fb707b-21e8-4dbc-a18b-d9f297b25355 service nova] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Updated VIF entry in instance network info cache for port d6ee81d1-3abc-4d5e-a8ca-658407cbd553. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1610.544328] env[62510]: DEBUG nova.network.neutron [req-45725f71-d40d-4c90-9edb-1f0ab3a12d7f req-e4fb707b-21e8-4dbc-a18b-d9f297b25355 service nova] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Updating instance_info_cache with network_info: [{"id": "d6ee81d1-3abc-4d5e-a8ca-658407cbd553", "address": "fa:16:3e:45:ab:75", "network": {"id": "3958d418-1b64-4598-975c-02b13c976ce5", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1692593298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.217", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3109fa7889c64dfda2117d4cd58aa528", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapd6ee81d1-3a", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1610.568874] env[62510]: DEBUG oslo_vmware.api [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768767, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07293} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.569182] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1610.569965] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8251368f-21fa-468a-aeb0-c9912749e4ce {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.594402] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] b5ff2a10-3c76-469a-86e0-ed3b135bca37/b5ff2a10-3c76-469a-86e0-ed3b135bca37.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1610.594734] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fa115834-8d92-4e49-a1f3-955cca5818cb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.618038] env[62510]: DEBUG oslo_vmware.api [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1610.618038] env[62510]: value = "task-1768769" [ 1610.618038] env[62510]: _type = "Task" [ 1610.618038] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.628181] env[62510]: DEBUG oslo_vmware.api [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768769, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.667674] env[62510]: DEBUG nova.compute.manager [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Start building block device mappings for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1610.682235] env[62510]: DEBUG nova.network.neutron [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Successfully created port: 648008fa-8f2b-4cb2-a911-200874a59cc0 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1610.770260] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24bc14d2-8c6b-47e6-9f89-f6569b6cd4a9 tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Acquiring lock "641628d1-bb6d-4207-89b9-98014328e028" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1610.770260] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24bc14d2-8c6b-47e6-9f89-f6569b6cd4a9 tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Lock "641628d1-bb6d-4207-89b9-98014328e028" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1610.770260] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24bc14d2-8c6b-47e6-9f89-f6569b6cd4a9 tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Acquiring lock "641628d1-bb6d-4207-89b9-98014328e028-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1610.770260] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24bc14d2-8c6b-47e6-9f89-f6569b6cd4a9 tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Lock "641628d1-bb6d-4207-89b9-98014328e028-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1610.770260] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24bc14d2-8c6b-47e6-9f89-f6569b6cd4a9 tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Lock "641628d1-bb6d-4207-89b9-98014328e028-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1610.772143] env[62510]: INFO nova.compute.manager [None req-24bc14d2-8c6b-47e6-9f89-f6569b6cd4a9 tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Terminating instance [ 1610.800065] env[62510]: DEBUG nova.compute.manager [None req-ec5dec1a-c502-43da-b7af-111abc37dd6c tempest-ServersListShow296Test-310805575 tempest-ServersListShow296Test-310805575-project-member] [instance: 1f0ab639-bfcb-48eb-a079-ea07dd627c2f] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1610.870354] env[62510]: DEBUG oslo_concurrency.lockutils [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquiring lock "refresh_cache-d3e25d50-f315-439b-9e9f-8e454a0631d4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1610.870354] env[62510]: DEBUG oslo_concurrency.lockutils [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquired lock "refresh_cache-d3e25d50-f315-439b-9e9f-8e454a0631d4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1610.870439] env[62510]: DEBUG nova.network.neutron [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1610.996636] env[62510]: DEBUG oslo_vmware.api [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768768, 'name': ReconfigVM_Task, 'duration_secs': 0.390176} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.997078] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Reconfigured VM instance instance-0000002e to detach disk 2000 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1610.998335] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2247183a-1842-4012-a397-8e9b820b6117 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.032224] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Reconfiguring VM instance instance-0000002e to attach disk [datastore1] fae7e580-ab09-4fda-9cbe-0e066ddcb85c/fae7e580-ab09-4fda-9cbe-0e066ddcb85c.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1611.036289] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73e1ed74-9028-43ed-a7eb-a4f786c04b50 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.050193] env[62510]: DEBUG oslo_concurrency.lockutils [req-45725f71-d40d-4c90-9edb-1f0ab3a12d7f req-e4fb707b-21e8-4dbc-a18b-d9f297b25355 service nova] Releasing lock "refresh_cache-83fa0d32-18ee-401d-af0b-a0adb538e5f4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1611.057338] env[62510]: DEBUG oslo_vmware.api [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Waiting for 
the task: (returnval){ [ 1611.057338] env[62510]: value = "task-1768770" [ 1611.057338] env[62510]: _type = "Task" [ 1611.057338] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.071291] env[62510]: DEBUG oslo_vmware.api [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768770, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.135342] env[62510]: DEBUG oslo_vmware.api [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768769, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.277880] env[62510]: DEBUG nova.compute.manager [None req-24bc14d2-8c6b-47e6-9f89-f6569b6cd4a9 tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1611.278193] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-24bc14d2-8c6b-47e6-9f89-f6569b6cd4a9 tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1611.279219] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54a26000-a9c6-42d6-8798-997ab4f3d300 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.293355] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-24bc14d2-8c6b-47e6-9f89-f6569b6cd4a9 tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1611.293790] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b7f3c164-b23f-4b87-9500-85e05babfc58 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.297474] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52d6de73-cbc9-45b6-ae6e-06e649d93c65 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.313138] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ac1e276-962d-4fb2-a7f4-8feffd47556a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.317190] env[62510]: DEBUG oslo_vmware.api [None req-24bc14d2-8c6b-47e6-9f89-f6569b6cd4a9 tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Waiting for the task: (returnval){ [ 1611.317190] env[62510]: value = "task-1768771" [ 1611.317190] env[62510]: _type = "Task" [ 1611.317190] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.356020] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ec5dec1a-c502-43da-b7af-111abc37dd6c tempest-ServersListShow296Test-310805575 tempest-ServersListShow296Test-310805575-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1611.358626] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a8c71f6-ac86-4f48-bfd1-61bcd800848b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.361495] env[62510]: DEBUG oslo_vmware.api [None req-24bc14d2-8c6b-47e6-9f89-f6569b6cd4a9 tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Task: {'id': task-1768771, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.368575] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acc7c1dd-d251-4ffd-9c00-99c6d190656a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.385821] env[62510]: DEBUG nova.compute.provider_tree [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1611.568754] env[62510]: DEBUG oslo_vmware.api [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768770, 'name': ReconfigVM_Task, 'duration_secs': 0.418832} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.569058] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Reconfigured VM instance instance-0000002e to attach disk [datastore1] fae7e580-ab09-4fda-9cbe-0e066ddcb85c/fae7e580-ab09-4fda-9cbe-0e066ddcb85c.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1611.569345] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Updating instance 'fae7e580-ab09-4fda-9cbe-0e066ddcb85c' progress to 50 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1611.632199] env[62510]: DEBUG oslo_vmware.api [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768769, 'name': ReconfigVM_Task, 'duration_secs': 0.651708} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.632930] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Reconfigured VM instance instance-00000038 to attach disk [datastore1] b5ff2a10-3c76-469a-86e0-ed3b135bca37/b5ff2a10-3c76-469a-86e0-ed3b135bca37.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1611.633573] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c85cca4d-3408-4f81-ae0f-4a188daa8464 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.641372] env[62510]: DEBUG oslo_vmware.api [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1611.641372] env[62510]: value = "task-1768772" [ 1611.641372] env[62510]: _type = "Task" [ 1611.641372] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.646192] env[62510]: DEBUG nova.network.neutron [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1611.653130] env[62510]: DEBUG oslo_vmware.api [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768772, 'name': Rename_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.682489] env[62510]: DEBUG nova.compute.manager [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1611.685161] env[62510]: DEBUG oslo_concurrency.lockutils [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquiring lock "83fa0d32-18ee-401d-af0b-a0adb538e5f4" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1611.724665] env[62510]: DEBUG nova.virt.hardware [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1611.725036] env[62510]: DEBUG nova.virt.hardware [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1611.725294] env[62510]: DEBUG nova.virt.hardware [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1611.725631] env[62510]: DEBUG nova.virt.hardware [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1611.725830] env[62510]: DEBUG nova.virt.hardware [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1611.725991] env[62510]: DEBUG nova.virt.hardware [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1611.726614] env[62510]: DEBUG nova.virt.hardware [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1611.726614] env[62510]: DEBUG nova.virt.hardware [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1611.726722] env[62510]: DEBUG nova.virt.hardware [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1611.726882] env[62510]: DEBUG nova.virt.hardware [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1611.727072] env[62510]: DEBUG nova.virt.hardware [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1611.727988] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b4c2cef-fd6c-443d-a799-fdea87c40f77 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.738440] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16597663-039e-43d2-b883-442b2ff5f5f5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.830937] env[62510]: DEBUG oslo_vmware.api [None req-24bc14d2-8c6b-47e6-9f89-f6569b6cd4a9 tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Task: {'id': task-1768771, 'name': PowerOffVM_Task, 'duration_secs': 0.221044} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.832148] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-24bc14d2-8c6b-47e6-9f89-f6569b6cd4a9 tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1611.832845] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-24bc14d2-8c6b-47e6-9f89-f6569b6cd4a9 tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1611.833244] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bf3d4bff-e690-4883-a178-711c2ce8ff8a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.840652] env[62510]: DEBUG nova.network.neutron [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Updating instance_info_cache with network_info: [{"id": "28687f38-1a1a-40ad-ad64-c571d7a7dbe3", "address": "fa:16:3e:63:b6:23", "network": {"id": "925f8c0b-2409-4eca-9a68-c5b357835972", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2008838096-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d98518565b744451ba90ba301267213f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4e52d8a-b086-4333-a5a1-938680a2d2bd", "external-id": "nsx-vlan-transportzone-973", "segmentation_id": 973, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28687f38-1a", "ovs_interfaceid": "28687f38-1a1a-40ad-ad64-c571d7a7dbe3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1611.892097] env[62510]: DEBUG nova.scheduler.client.report [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1611.970554] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-24bc14d2-8c6b-47e6-9f89-f6569b6cd4a9 
tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1611.973025] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-24bc14d2-8c6b-47e6-9f89-f6569b6cd4a9 tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1611.973025] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-24bc14d2-8c6b-47e6-9f89-f6569b6cd4a9 tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Deleting the datastore file [datastore1] 641628d1-bb6d-4207-89b9-98014328e028 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1611.973025] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-afeabf37-f495-423f-9f02-64b39a52de62 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.985017] env[62510]: DEBUG oslo_vmware.api [None req-24bc14d2-8c6b-47e6-9f89-f6569b6cd4a9 tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Waiting for the task: (returnval){ [ 1611.985017] env[62510]: value = "task-1768774" [ 1611.985017] env[62510]: _type = "Task" [ 1611.985017] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.994212] env[62510]: DEBUG oslo_vmware.api [None req-24bc14d2-8c6b-47e6-9f89-f6569b6cd4a9 tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Task: {'id': task-1768774, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.012936] env[62510]: DEBUG nova.compute.manager [req-1d450f3b-396f-4b8d-a9e4-d76f6e5b785d req-e88ac0a8-ad09-4717-ae97-c823300df934 service nova] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Received event network-changed-30661ae8-22f8-4f9e-91d9-67d7a31e134c {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1612.013778] env[62510]: DEBUG nova.compute.manager [req-1d450f3b-396f-4b8d-a9e4-d76f6e5b785d req-e88ac0a8-ad09-4717-ae97-c823300df934 service nova] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Refreshing instance network info cache due to event network-changed-30661ae8-22f8-4f9e-91d9-67d7a31e134c. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1612.014182] env[62510]: DEBUG oslo_concurrency.lockutils [req-1d450f3b-396f-4b8d-a9e4-d76f6e5b785d req-e88ac0a8-ad09-4717-ae97-c823300df934 service nova] Acquiring lock "refresh_cache-13cdba63-5db4-419f-9e0b-244832d7866b" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1612.014496] env[62510]: DEBUG oslo_concurrency.lockutils [req-1d450f3b-396f-4b8d-a9e4-d76f6e5b785d req-e88ac0a8-ad09-4717-ae97-c823300df934 service nova] Acquired lock "refresh_cache-13cdba63-5db4-419f-9e0b-244832d7866b" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1612.014815] env[62510]: DEBUG nova.network.neutron [req-1d450f3b-396f-4b8d-a9e4-d76f6e5b785d req-e88ac0a8-ad09-4717-ae97-c823300df934 service nova] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Refreshing network info cache for port 30661ae8-22f8-4f9e-91d9-67d7a31e134c {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1612.076736] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fa97a69-c1a2-4a97-8282-333ff12b09e6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.098735] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3596bf9-0ef7-4b3f-9d30-07771cbab043 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.118047] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Updating instance 'fae7e580-ab09-4fda-9cbe-0e066ddcb85c' progress to 67 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1612.152824] env[62510]: DEBUG oslo_vmware.api [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768772, 'name': Rename_Task, 'duration_secs': 0.217403} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1612.153248] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1612.153696] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3e0179a5-22fd-4c46-9c8c-153b2a987051 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.161103] env[62510]: DEBUG oslo_vmware.api [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1612.161103] env[62510]: value = "task-1768775" [ 1612.161103] env[62510]: _type = "Task" [ 1612.161103] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1612.171303] env[62510]: DEBUG oslo_vmware.api [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768775, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.344486] env[62510]: DEBUG oslo_concurrency.lockutils [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Releasing lock "refresh_cache-d3e25d50-f315-439b-9e9f-8e454a0631d4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1612.344486] env[62510]: DEBUG nova.compute.manager [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Instance network_info: |[{"id": "28687f38-1a1a-40ad-ad64-c571d7a7dbe3", "address": "fa:16:3e:63:b6:23", "network": {"id": "925f8c0b-2409-4eca-9a68-c5b357835972", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2008838096-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d98518565b744451ba90ba301267213f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4e52d8a-b086-4333-a5a1-938680a2d2bd", "external-id": "nsx-vlan-transportzone-973", "segmentation_id": 973, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28687f38-1a", "ovs_interfaceid": "28687f38-1a1a-40ad-ad64-c571d7a7dbe3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1612.344839] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:63:b6:23', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e4e52d8a-b086-4333-a5a1-938680a2d2bd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '28687f38-1a1a-40ad-ad64-c571d7a7dbe3', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1612.356016] env[62510]: DEBUG oslo.service.loopingcall [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1612.356016] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1612.356016] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-75192001-52c1-4825-914b-b660e2533e09 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.393836] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1612.393836] env[62510]: value = "task-1768776" [ 1612.393836] env[62510]: _type = "Task" [ 1612.393836] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1612.400800] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.754s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1612.401326] env[62510]: DEBUG nova.compute.manager [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1612.408966] env[62510]: DEBUG oslo_concurrency.lockutils [None req-497a6ef0-b6ec-435e-a80f-dca727a12e36 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.072s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1612.408966] env[62510]: DEBUG nova.objects.instance [None req-497a6ef0-b6ec-435e-a80f-dca727a12e36 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lazy-loading 'resources' on Instance uuid 0029d975-bd48-4558-9f41-a0cf91336393 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1612.409443] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768776, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.444313] env[62510]: DEBUG nova.network.neutron [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Successfully updated port: 648008fa-8f2b-4cb2-a911-200874a59cc0 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1612.497504] env[62510]: DEBUG oslo_vmware.api [None req-24bc14d2-8c6b-47e6-9f89-f6569b6cd4a9 tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Task: {'id': task-1768774, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149619} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1612.497504] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-24bc14d2-8c6b-47e6-9f89-f6569b6cd4a9 tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1612.497504] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-24bc14d2-8c6b-47e6-9f89-f6569b6cd4a9 tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1612.497504] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-24bc14d2-8c6b-47e6-9f89-f6569b6cd4a9 tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1612.497703] env[62510]: INFO nova.compute.manager [None req-24bc14d2-8c6b-47e6-9f89-f6569b6cd4a9 tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1612.498616] env[62510]: DEBUG oslo.service.loopingcall [None req-24bc14d2-8c6b-47e6-9f89-f6569b6cd4a9 tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1612.498616] env[62510]: DEBUG nova.compute.manager [-] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1612.498616] env[62510]: DEBUG nova.network.neutron [-] [instance: 641628d1-bb6d-4207-89b9-98014328e028] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1612.672384] env[62510]: DEBUG oslo_vmware.api [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768775, 'name': PowerOnVM_Task} progress is 92%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.690983] env[62510]: DEBUG nova.network.neutron [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Port 21d515b9-b00f-45cc-9437-318ee6bba755 binding to destination host cpu-1 is already ACTIVE {{(pid=62510) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1612.716487] env[62510]: DEBUG nova.compute.manager [req-d542269e-123a-4bce-bd96-40e63587f778 req-077014c7-16f7-4be9-b063-812093153b3f service nova] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Received event network-vif-plugged-648008fa-8f2b-4cb2-a911-200874a59cc0 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1612.716834] env[62510]: DEBUG oslo_concurrency.lockutils [req-d542269e-123a-4bce-bd96-40e63587f778 req-077014c7-16f7-4be9-b063-812093153b3f service nova] Acquiring lock "0d27da5c-20f3-4df1-86d2-036c904fd657-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1612.716834] env[62510]: DEBUG oslo_concurrency.lockutils [req-d542269e-123a-4bce-bd96-40e63587f778 req-077014c7-16f7-4be9-b063-812093153b3f service nova] Lock "0d27da5c-20f3-4df1-86d2-036c904fd657-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1612.717142] env[62510]: DEBUG oslo_concurrency.lockutils [req-d542269e-123a-4bce-bd96-40e63587f778 req-077014c7-16f7-4be9-b063-812093153b3f service nova] Lock "0d27da5c-20f3-4df1-86d2-036c904fd657-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1612.718155] env[62510]: DEBUG nova.compute.manager [req-d542269e-123a-4bce-bd96-40e63587f778 req-077014c7-16f7-4be9-b063-812093153b3f service nova] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] No waiting events found dispatching network-vif-plugged-648008fa-8f2b-4cb2-a911-200874a59cc0 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1612.718401] env[62510]: WARNING nova.compute.manager [req-d542269e-123a-4bce-bd96-40e63587f778 req-077014c7-16f7-4be9-b063-812093153b3f service nova] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Received unexpected event network-vif-plugged-648008fa-8f2b-4cb2-a911-200874a59cc0 for instance with vm_state building and task_state spawning. [ 1612.905779] env[62510]: DEBUG nova.network.neutron [req-1d450f3b-396f-4b8d-a9e4-d76f6e5b785d req-e88ac0a8-ad09-4717-ae97-c823300df934 service nova] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Updated VIF entry in instance network info cache for port 30661ae8-22f8-4f9e-91d9-67d7a31e134c. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1612.905779] env[62510]: DEBUG nova.network.neutron [req-1d450f3b-396f-4b8d-a9e4-d76f6e5b785d req-e88ac0a8-ad09-4717-ae97-c823300df934 service nova] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Updating instance_info_cache with network_info: [{"id": "30661ae8-22f8-4f9e-91d9-67d7a31e134c", "address": "fa:16:3e:56:11:04", "network": {"id": "d642e7b5-7af1-4b88-a23f-8cc8ee29428b", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1437968534-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cd2a30849f1f4574a890619b3fff7010", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30661ae8-22", "ovs_interfaceid": "30661ae8-22f8-4f9e-91d9-67d7a31e134c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1612.917353] env[62510]: DEBUG nova.compute.utils [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1612.917353] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768776, 'name': CreateVM_Task, 'duration_secs': 0.402099} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1612.917829] env[62510]: DEBUG nova.compute.manager [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1612.918037] env[62510]: DEBUG nova.network.neutron [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1612.920058] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1612.921118] env[62510]: DEBUG oslo_concurrency.lockutils [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1612.921385] env[62510]: DEBUG oslo_concurrency.lockutils [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1612.921741] env[62510]: DEBUG oslo_concurrency.lockutils [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1612.923259] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f05be585-fd8e-4fbd-96da-9b4573028a7a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.931402] env[62510]: DEBUG oslo_vmware.api [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1612.931402] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52794a90-b250-b0ee-03f5-60ec123c362e" [ 1612.931402] env[62510]: _type = "Task" [ 1612.931402] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1612.948605] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquiring lock "refresh_cache-0d27da5c-20f3-4df1-86d2-036c904fd657" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1612.948850] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquired lock "refresh_cache-0d27da5c-20f3-4df1-86d2-036c904fd657" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1612.950587] env[62510]: DEBUG nova.network.neutron [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1612.951208] env[62510]: DEBUG oslo_vmware.api [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52794a90-b250-b0ee-03f5-60ec123c362e, 'name': SearchDatastore_Task, 'duration_secs': 0.012258} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1612.955646] env[62510]: DEBUG oslo_concurrency.lockutils [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1612.956098] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1612.956426] env[62510]: DEBUG oslo_concurrency.lockutils [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1612.956920] env[62510]: DEBUG oslo_concurrency.lockutils [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1612.957301] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1612.958316] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ef87ffd1-9312-459d-95fb-b8901ee8f545 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.968846] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1612.968846] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1612.972146] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48425e9e-7122-4ccc-84f1-c627369fc2de {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.976290] env[62510]: DEBUG nova.policy [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e483d7dc32804985bc9af5128670131b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5b13a257970e4a9a9f9cfecaaf37d9da', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1612.981726] env[62510]: DEBUG oslo_vmware.api [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1612.981726] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]524dba1c-e016-9984-ffe1-4907a155fddc" [ 1612.981726] env[62510]: _type = "Task" [ 1612.981726] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1612.993750] env[62510]: DEBUG oslo_vmware.api [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]524dba1c-e016-9984-ffe1-4907a155fddc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.174386] env[62510]: DEBUG oslo_vmware.api [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768775, 'name': PowerOnVM_Task, 'duration_secs': 0.615605} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.174758] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1613.175035] env[62510]: INFO nova.compute.manager [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Took 9.48 seconds to spawn the instance on the hypervisor. [ 1613.175263] env[62510]: DEBUG nova.compute.manager [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1613.176151] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6546995-01d9-4bc4-8984-4e0cf25e0b50 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.257462] env[62510]: DEBUG nova.network.neutron [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Successfully created port: 5550cd5d-e9b6-4414-a8e4-e7c6875d2399 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1613.420200] env[62510]: DEBUG nova.compute.manager [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Start building block device mappings for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1613.422645] env[62510]: DEBUG oslo_concurrency.lockutils [req-1d450f3b-396f-4b8d-a9e4-d76f6e5b785d req-e88ac0a8-ad09-4717-ae97-c823300df934 service nova] Releasing lock "refresh_cache-13cdba63-5db4-419f-9e0b-244832d7866b" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1613.423125] env[62510]: DEBUG nova.compute.manager [req-1d450f3b-396f-4b8d-a9e4-d76f6e5b785d req-e88ac0a8-ad09-4717-ae97-c823300df934 service nova] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Received event network-vif-plugged-28687f38-1a1a-40ad-ad64-c571d7a7dbe3 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1613.423441] env[62510]: DEBUG oslo_concurrency.lockutils [req-1d450f3b-396f-4b8d-a9e4-d76f6e5b785d req-e88ac0a8-ad09-4717-ae97-c823300df934 service nova] Acquiring lock "d3e25d50-f315-439b-9e9f-8e454a0631d4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1613.423778] env[62510]: DEBUG oslo_concurrency.lockutils [req-1d450f3b-396f-4b8d-a9e4-d76f6e5b785d req-e88ac0a8-ad09-4717-ae97-c823300df934 service nova] Lock "d3e25d50-f315-439b-9e9f-8e454a0631d4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1613.424260] env[62510]: DEBUG oslo_concurrency.lockutils [req-1d450f3b-396f-4b8d-a9e4-d76f6e5b785d req-e88ac0a8-ad09-4717-ae97-c823300df934 service nova] Lock "d3e25d50-f315-439b-9e9f-8e454a0631d4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1613.424838] env[62510]: DEBUG nova.compute.manager [req-1d450f3b-396f-4b8d-a9e4-d76f6e5b785d req-e88ac0a8-ad09-4717-ae97-c823300df934 service nova] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] No waiting events found dispatching network-vif-plugged-28687f38-1a1a-40ad-ad64-c571d7a7dbe3 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1613.425352] env[62510]: WARNING nova.compute.manager [req-1d450f3b-396f-4b8d-a9e4-d76f6e5b785d req-e88ac0a8-ad09-4717-ae97-c823300df934 service nova] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Received unexpected event network-vif-plugged-28687f38-1a1a-40ad-ad64-c571d7a7dbe3 for instance with vm_state building and task_state spawning. [ 1613.425688] env[62510]: DEBUG nova.compute.manager [req-1d450f3b-396f-4b8d-a9e4-d76f6e5b785d req-e88ac0a8-ad09-4717-ae97-c823300df934 service nova] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Received event network-changed-28687f38-1a1a-40ad-ad64-c571d7a7dbe3 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1613.425965] env[62510]: DEBUG nova.compute.manager [req-1d450f3b-396f-4b8d-a9e4-d76f6e5b785d req-e88ac0a8-ad09-4717-ae97-c823300df934 service nova] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Refreshing instance network info cache due to event network-changed-28687f38-1a1a-40ad-ad64-c571d7a7dbe3. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1613.426266] env[62510]: DEBUG oslo_concurrency.lockutils [req-1d450f3b-396f-4b8d-a9e4-d76f6e5b785d req-e88ac0a8-ad09-4717-ae97-c823300df934 service nova] Acquiring lock "refresh_cache-d3e25d50-f315-439b-9e9f-8e454a0631d4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1613.426519] env[62510]: DEBUG oslo_concurrency.lockutils [req-1d450f3b-396f-4b8d-a9e4-d76f6e5b785d req-e88ac0a8-ad09-4717-ae97-c823300df934 service nova] Acquired lock "refresh_cache-d3e25d50-f315-439b-9e9f-8e454a0631d4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1613.426843] env[62510]: DEBUG nova.network.neutron [req-1d450f3b-396f-4b8d-a9e4-d76f6e5b785d req-e88ac0a8-ad09-4717-ae97-c823300df934 service nova] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Refreshing network info cache for port 28687f38-1a1a-40ad-ad64-c571d7a7dbe3 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1613.442448] env[62510]: DEBUG nova.network.neutron [-] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1613.480587] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cbef83d-9bed-425b-9eed-fecff509ac06 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.500864] env[62510]: DEBUG oslo_vmware.api [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]524dba1c-e016-9984-ffe1-4907a155fddc, 'name': SearchDatastore_Task, 'duration_secs': 0.010793} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.501931] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60f9ba5d-d07c-4668-826c-83dc9247adbc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.506450] env[62510]: DEBUG nova.network.neutron [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1613.511738] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d080703-113e-49af-8402-32c0ac3af9ec {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.518621] env[62510]: DEBUG oslo_vmware.api [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1613.518621] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52273e71-e1f7-7013-b83c-4bec6bb14c0d" [ 1613.518621] env[62510]: _type = "Task" [ 1613.518621] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1613.554082] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-657c46d8-e8c6-4506-b680-9d6113815bd3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.567536] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b05eb455-24fa-489e-87fb-903510c84483 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.572791] env[62510]: DEBUG oslo_vmware.api [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52273e71-e1f7-7013-b83c-4bec6bb14c0d, 'name': SearchDatastore_Task, 'duration_secs': 0.012194} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.573124] env[62510]: DEBUG oslo_concurrency.lockutils [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1613.573487] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] d3e25d50-f315-439b-9e9f-8e454a0631d4/d3e25d50-f315-439b-9e9f-8e454a0631d4.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1613.574157] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f16358ab-b226-4fc8-a74a-ec69cf7b9345 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.586379] env[62510]: DEBUG nova.compute.provider_tree [None req-497a6ef0-b6ec-435e-a80f-dca727a12e36 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1613.589236] env[62510]: DEBUG oslo_vmware.api [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1613.589236] env[62510]: value = "task-1768777" [ 1613.589236] env[62510]: _type = "Task" [ 1613.589236] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1613.602298] env[62510]: DEBUG oslo_vmware.api [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768777, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.697077] env[62510]: DEBUG nova.network.neutron [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Updating instance_info_cache with network_info: [{"id": "648008fa-8f2b-4cb2-a911-200874a59cc0", "address": "fa:16:3e:1d:0c:fd", "network": {"id": "f122ba1e-a858-4704-b83d-f76156f060fc", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1645239499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e39188afd4e94f01a5b3f1ec78cf70e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap648008fa-8f", "ovs_interfaceid": "648008fa-8f2b-4cb2-a911-200874a59cc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1613.703570] env[62510]: INFO nova.compute.manager [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Took 44.86 seconds to build instance. [ 1613.722337] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquiring lock "fae7e580-ab09-4fda-9cbe-0e066ddcb85c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1613.722630] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Lock "fae7e580-ab09-4fda-9cbe-0e066ddcb85c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1613.722834] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Lock "fae7e580-ab09-4fda-9cbe-0e066ddcb85c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1613.944989] env[62510]: INFO nova.compute.manager [-] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Took 1.45 seconds to deallocate network for instance. 
[ 1614.091219] env[62510]: DEBUG nova.scheduler.client.report [None req-497a6ef0-b6ec-435e-a80f-dca727a12e36 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1614.110593] env[62510]: DEBUG oslo_vmware.api [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768777, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.508988} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1614.111216] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] d3e25d50-f315-439b-9e9f-8e454a0631d4/d3e25d50-f315-439b-9e9f-8e454a0631d4.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1614.112620] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1614.112620] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8a1c174a-c571-47a5-86c3-d899ddd6d9d7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.121973] env[62510]: DEBUG oslo_vmware.api [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1614.121973] env[62510]: value = "task-1768778" [ 1614.121973] env[62510]: _type = "Task" [ 1614.121973] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1614.140836] env[62510]: DEBUG oslo_vmware.api [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768778, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.203173] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Releasing lock "refresh_cache-0d27da5c-20f3-4df1-86d2-036c904fd657" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1614.203173] env[62510]: DEBUG nova.compute.manager [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Instance network_info: |[{"id": "648008fa-8f2b-4cb2-a911-200874a59cc0", "address": "fa:16:3e:1d:0c:fd", "network": {"id": "f122ba1e-a858-4704-b83d-f76156f060fc", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1645239499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e39188afd4e94f01a5b3f1ec78cf70e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap648008fa-8f", "ovs_interfaceid": "648008fa-8f2b-4cb2-a911-200874a59cc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1614.203910] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1d:0c:fd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c1b8b991-feba-44e6-900c-6486e7e122f0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '648008fa-8f2b-4cb2-a911-200874a59cc0', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1614.212827] env[62510]: DEBUG oslo.service.loopingcall [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1614.213663] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1614.214337] env[62510]: DEBUG oslo_concurrency.lockutils [None req-167dcb08-16dc-40d4-ae72-75e8e001a8ab tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "b5ff2a10-3c76-469a-86e0-ed3b135bca37" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.647s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1614.214801] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a19ff281-a961-4432-95c8-d595eff1968d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.239865] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1614.239865] env[62510]: value = "task-1768779" [ 1614.239865] env[62510]: _type = "Task" [ 1614.239865] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1614.249779] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768779, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.368098] env[62510]: DEBUG nova.network.neutron [req-1d450f3b-396f-4b8d-a9e4-d76f6e5b785d req-e88ac0a8-ad09-4717-ae97-c823300df934 service nova] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Updated VIF entry in instance network info cache for port 28687f38-1a1a-40ad-ad64-c571d7a7dbe3. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1614.368540] env[62510]: DEBUG nova.network.neutron [req-1d450f3b-396f-4b8d-a9e4-d76f6e5b785d req-e88ac0a8-ad09-4717-ae97-c823300df934 service nova] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Updating instance_info_cache with network_info: [{"id": "28687f38-1a1a-40ad-ad64-c571d7a7dbe3", "address": "fa:16:3e:63:b6:23", "network": {"id": "925f8c0b-2409-4eca-9a68-c5b357835972", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2008838096-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d98518565b744451ba90ba301267213f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4e52d8a-b086-4333-a5a1-938680a2d2bd", "external-id": "nsx-vlan-transportzone-973", "segmentation_id": 973, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28687f38-1a", "ovs_interfaceid": "28687f38-1a1a-40ad-ad64-c571d7a7dbe3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1614.439088] env[62510]: DEBUG nova.compute.manager [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1614.454078] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24bc14d2-8c6b-47e6-9f89-f6569b6cd4a9 tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1614.473035] env[62510]: DEBUG nova.virt.hardware [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1614.473300] env[62510]: DEBUG nova.virt.hardware [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1614.473462] env[62510]: DEBUG nova.virt.hardware [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1614.473748] env[62510]: DEBUG nova.virt.hardware [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1614.473970] env[62510]: DEBUG nova.virt.hardware [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1614.474147] env[62510]: DEBUG nova.virt.hardware [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1614.474369] env[62510]: DEBUG nova.virt.hardware [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1614.474522] env[62510]: DEBUG nova.virt.hardware [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1614.474685] env[62510]: DEBUG nova.virt.hardware [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1614.474844] env[62510]: DEBUG nova.virt.hardware [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1614.475023] env[62510]: DEBUG nova.virt.hardware [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1614.475915] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-663895ba-cd5c-4157-9d59-bfae68c8d365 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.485993] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d06e90a0-37a6-4639-a106-b7f353e65ca0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.603783] env[62510]: DEBUG oslo_concurrency.lockutils [None req-497a6ef0-b6ec-435e-a80f-dca727a12e36 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.196s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1614.606236] env[62510]: DEBUG oslo_concurrency.lockutils [None req-224361af-372b-499f-b410-55a610f161fd tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.298s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1614.606480] env[62510]: DEBUG nova.objects.instance [None req-224361af-372b-499f-b410-55a610f161fd tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Lazy-loading 'resources' on Instance uuid 2c5c38c1-511f-4aae-969a-eb6de128fae7 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1614.629774] env[62510]: INFO nova.scheduler.client.report [None req-497a6ef0-b6ec-435e-a80f-dca727a12e36 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Deleted allocations for instance 0029d975-bd48-4558-9f41-a0cf91336393 [ 1614.643500] env[62510]: DEBUG oslo_vmware.api [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 
tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768778, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.092417} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1614.647023] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1614.647023] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-523aea57-715a-4568-93e4-5497e1710c47 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.672967] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Reconfiguring VM instance instance-00000039 to attach disk [datastore1] d3e25d50-f315-439b-9e9f-8e454a0631d4/d3e25d50-f315-439b-9e9f-8e454a0631d4.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1614.673173] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-315749e1-0434-4009-bf95-d514ddf7e9a0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.695815] env[62510]: DEBUG oslo_vmware.api [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1614.695815] env[62510]: value = "task-1768780" [ 1614.695815] env[62510]: _type = "Task" [ 1614.695815] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1614.706829] env[62510]: DEBUG oslo_vmware.api [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768780, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.757726] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768779, 'name': CreateVM_Task, 'duration_secs': 0.449431} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1614.757924] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1614.758646] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1614.758815] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1614.759203] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1614.759727] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-057b4df7-8695-4b8d-8824-110183ca3e30 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.767395] env[62510]: DEBUG oslo_vmware.api [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1614.767395] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5215b1a8-bf42-f915-f21a-0619dd9cf8df" [ 1614.767395] env[62510]: _type = "Task" [ 1614.767395] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1614.776140] env[62510]: DEBUG oslo_vmware.api [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5215b1a8-bf42-f915-f21a-0619dd9cf8df, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.778918] env[62510]: DEBUG nova.compute.manager [req-a2f38f20-3322-401e-97e1-df067cb7659d req-5a8bdff1-df34-43d0-82a8-8c1008316e9c service nova] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Received event network-changed-30661ae8-22f8-4f9e-91d9-67d7a31e134c {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1614.779030] env[62510]: DEBUG nova.compute.manager [req-a2f38f20-3322-401e-97e1-df067cb7659d req-5a8bdff1-df34-43d0-82a8-8c1008316e9c service nova] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Refreshing instance network info cache due to event network-changed-30661ae8-22f8-4f9e-91d9-67d7a31e134c. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1614.779765] env[62510]: DEBUG oslo_concurrency.lockutils [req-a2f38f20-3322-401e-97e1-df067cb7659d req-5a8bdff1-df34-43d0-82a8-8c1008316e9c service nova] Acquiring lock "refresh_cache-13cdba63-5db4-419f-9e0b-244832d7866b" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1614.779765] env[62510]: DEBUG oslo_concurrency.lockutils [req-a2f38f20-3322-401e-97e1-df067cb7659d req-5a8bdff1-df34-43d0-82a8-8c1008316e9c service nova] Acquired lock "refresh_cache-13cdba63-5db4-419f-9e0b-244832d7866b" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1614.779765] env[62510]: DEBUG nova.network.neutron [req-a2f38f20-3322-401e-97e1-df067cb7659d req-5a8bdff1-df34-43d0-82a8-8c1008316e9c service nova] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Refreshing network info cache for port 30661ae8-22f8-4f9e-91d9-67d7a31e134c {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1614.823081] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquiring lock "refresh_cache-fae7e580-ab09-4fda-9cbe-0e066ddcb85c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1614.823081] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquired lock "refresh_cache-fae7e580-ab09-4fda-9cbe-0e066ddcb85c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1614.823259] env[62510]: DEBUG nova.network.neutron [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1614.872394] env[62510]: DEBUG oslo_concurrency.lockutils [req-1d450f3b-396f-4b8d-a9e4-d76f6e5b785d req-e88ac0a8-ad09-4717-ae97-c823300df934 service nova] Releasing lock "refresh_cache-d3e25d50-f315-439b-9e9f-8e454a0631d4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1615.129641] env[62510]: DEBUG nova.network.neutron [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Successfully updated port: 5550cd5d-e9b6-4414-a8e4-e7c6875d2399 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1615.148176] env[62510]: DEBUG oslo_concurrency.lockutils [None req-497a6ef0-b6ec-435e-a80f-dca727a12e36 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "0029d975-bd48-4558-9f41-a0cf91336393" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.129s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1615.169974] env[62510]: DEBUG nova.compute.manager [req-67c293e9-e468-4505-8073-60f8e2dcb9aa req-96cf6367-14f6-45b8-82c1-c717f98191c9 service nova] 
[instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Received event network-changed-648008fa-8f2b-4cb2-a911-200874a59cc0 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1615.170182] env[62510]: DEBUG nova.compute.manager [req-67c293e9-e468-4505-8073-60f8e2dcb9aa req-96cf6367-14f6-45b8-82c1-c717f98191c9 service nova] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Refreshing instance network info cache due to event network-changed-648008fa-8f2b-4cb2-a911-200874a59cc0. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1615.170725] env[62510]: DEBUG oslo_concurrency.lockutils [req-67c293e9-e468-4505-8073-60f8e2dcb9aa req-96cf6367-14f6-45b8-82c1-c717f98191c9 service nova] Acquiring lock "refresh_cache-0d27da5c-20f3-4df1-86d2-036c904fd657" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1615.170725] env[62510]: DEBUG oslo_concurrency.lockutils [req-67c293e9-e468-4505-8073-60f8e2dcb9aa req-96cf6367-14f6-45b8-82c1-c717f98191c9 service nova] Acquired lock "refresh_cache-0d27da5c-20f3-4df1-86d2-036c904fd657" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1615.170725] env[62510]: DEBUG nova.network.neutron [req-67c293e9-e468-4505-8073-60f8e2dcb9aa req-96cf6367-14f6-45b8-82c1-c717f98191c9 service nova] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Refreshing network info cache for port 648008fa-8f2b-4cb2-a911-200874a59cc0 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1615.210137] env[62510]: DEBUG oslo_vmware.api [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768780, 'name': ReconfigVM_Task, 'duration_secs': 0.322139} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1615.211090] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Reconfigured VM instance instance-00000039 to attach disk [datastore1] d3e25d50-f315-439b-9e9f-8e454a0631d4/d3e25d50-f315-439b-9e9f-8e454a0631d4.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1615.214065] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-558fbb88-e79e-44f1-9b65-35eb61792682 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.220426] env[62510]: DEBUG oslo_vmware.api [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1615.220426] env[62510]: value = "task-1768781" [ 1615.220426] env[62510]: _type = "Task" [ 1615.220426] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1615.231928] env[62510]: DEBUG oslo_vmware.api [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768781, 'name': Rename_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.287800] env[62510]: DEBUG oslo_vmware.api [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5215b1a8-bf42-f915-f21a-0619dd9cf8df, 'name': SearchDatastore_Task, 'duration_secs': 0.010509} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1615.288459] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1615.288808] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1615.289102] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1615.289675] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1615.290837] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1615.290939] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f52f120f-d6b9-4271-8c6e-1630d08fe6cb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.302801] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1615.302801] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1615.302801] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6341a353-6aea-4a06-8e6f-927a2ba08aca {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.311179] env[62510]: DEBUG oslo_vmware.api [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1615.311179] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52f5e3e1-aec8-9333-4dd6-74d87231b379" [ 1615.311179] env[62510]: _type = "Task" [ 1615.311179] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1615.323814] env[62510]: DEBUG oslo_vmware.api [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52f5e3e1-aec8-9333-4dd6-74d87231b379, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.554456] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bf020cec-3ef6-4b06-86db-fe8d02d197f2 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "b5ff2a10-3c76-469a-86e0-ed3b135bca37" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1615.554456] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bf020cec-3ef6-4b06-86db-fe8d02d197f2 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "b5ff2a10-3c76-469a-86e0-ed3b135bca37" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1615.554456] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bf020cec-3ef6-4b06-86db-fe8d02d197f2 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "b5ff2a10-3c76-469a-86e0-ed3b135bca37-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1615.554456] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bf020cec-3ef6-4b06-86db-fe8d02d197f2 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "b5ff2a10-3c76-469a-86e0-ed3b135bca37-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1615.554456] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bf020cec-3ef6-4b06-86db-fe8d02d197f2 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "b5ff2a10-3c76-469a-86e0-ed3b135bca37-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1615.559829] env[62510]: INFO nova.compute.manager [None req-bf020cec-3ef6-4b06-86db-fe8d02d197f2 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Terminating instance [ 1615.632352] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "refresh_cache-f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1615.632352] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquired lock "refresh_cache-f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1615.632352] env[62510]: DEBUG nova.network.neutron [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1615.733315] env[62510]: DEBUG oslo_vmware.api [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768781, 'name': Rename_Task, 'duration_secs': 0.174858} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1615.738306] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1615.738306] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-70bd83e5-8d01-4582-978d-1f3c757bd0ec {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.745836] env[62510]: DEBUG oslo_vmware.api [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1615.745836] env[62510]: value = "task-1768782" [ 1615.745836] env[62510]: _type = "Task" [ 1615.745836] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1615.762747] env[62510]: DEBUG oslo_vmware.api [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768782, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.789138] env[62510]: DEBUG nova.network.neutron [req-a2f38f20-3322-401e-97e1-df067cb7659d req-5a8bdff1-df34-43d0-82a8-8c1008316e9c service nova] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Updated VIF entry in instance network info cache for port 30661ae8-22f8-4f9e-91d9-67d7a31e134c. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1615.789563] env[62510]: DEBUG nova.network.neutron [req-a2f38f20-3322-401e-97e1-df067cb7659d req-5a8bdff1-df34-43d0-82a8-8c1008316e9c service nova] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Updating instance_info_cache with network_info: [{"id": "30661ae8-22f8-4f9e-91d9-67d7a31e134c", "address": "fa:16:3e:56:11:04", "network": {"id": "d642e7b5-7af1-4b88-a23f-8cc8ee29428b", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1437968534-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cd2a30849f1f4574a890619b3fff7010", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30661ae8-22", "ovs_interfaceid": "30661ae8-22f8-4f9e-91d9-67d7a31e134c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1615.821436] env[62510]: DEBUG oslo_vmware.api [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52f5e3e1-aec8-9333-4dd6-74d87231b379, 'name': SearchDatastore_Task, 'duration_secs': 0.00998} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1615.822267] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5e438ab-92f8-4f96-bab0-de003ad830d5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.825680] env[62510]: DEBUG nova.network.neutron [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Updating instance_info_cache with network_info: [{"id": "21d515b9-b00f-45cc-9437-318ee6bba755", "address": "fa:16:3e:66:30:96", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.170", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21d515b9-b0", "ovs_interfaceid": "21d515b9-b00f-45cc-9437-318ee6bba755", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1615.831446] env[62510]: DEBUG oslo_vmware.api [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1615.831446] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52219371-875b-c283-6ffc-463de6397cdd" [ 1615.831446] env[62510]: _type = "Task" [ 1615.831446] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1615.845691] env[62510]: DEBUG oslo_vmware.api [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52219371-875b-c283-6ffc-463de6397cdd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.915728] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cc3fafe-28af-4788-a0f4-52f763fbcfce {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.926379] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8031c0cb-a640-4166-8eae-8e6941251915 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.960522] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fe398dc-405e-4569-93fd-62852afdfcab {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.970621] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d506318f-8911-4b20-8658-513c15a31696 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.992021] env[62510]: DEBUG nova.compute.provider_tree [None req-224361af-372b-499f-b410-55a610f161fd tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1616.064254] env[62510]: DEBUG nova.compute.manager [None req-bf020cec-3ef6-4b06-86db-fe8d02d197f2 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1616.064494] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-bf020cec-3ef6-4b06-86db-fe8d02d197f2 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1616.065505] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aab2dc00-21db-4f4c-9956-c8ff15d76d89 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.077570] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf020cec-3ef6-4b06-86db-fe8d02d197f2 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1616.077870] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-26545f41-108b-488b-a374-4b13f5d2daf0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.087307] env[62510]: DEBUG oslo_vmware.api [None req-bf020cec-3ef6-4b06-86db-fe8d02d197f2 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1616.087307] env[62510]: value = "task-1768783" [ 1616.087307] env[62510]: _type = "Task" [ 1616.087307] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.096672] env[62510]: DEBUG oslo_vmware.api [None req-bf020cec-3ef6-4b06-86db-fe8d02d197f2 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768783, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.174894] env[62510]: DEBUG nova.network.neutron [req-67c293e9-e468-4505-8073-60f8e2dcb9aa req-96cf6367-14f6-45b8-82c1-c717f98191c9 service nova] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Updated VIF entry in instance network info cache for port 648008fa-8f2b-4cb2-a911-200874a59cc0. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1616.174894] env[62510]: DEBUG nova.network.neutron [req-67c293e9-e468-4505-8073-60f8e2dcb9aa req-96cf6367-14f6-45b8-82c1-c717f98191c9 service nova] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Updating instance_info_cache with network_info: [{"id": "648008fa-8f2b-4cb2-a911-200874a59cc0", "address": "fa:16:3e:1d:0c:fd", "network": {"id": "f122ba1e-a858-4704-b83d-f76156f060fc", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1645239499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e39188afd4e94f01a5b3f1ec78cf70e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap648008fa-8f", "ovs_interfaceid": "648008fa-8f2b-4cb2-a911-200874a59cc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1616.260459] env[62510]: DEBUG oslo_vmware.api [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768782, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.298684] env[62510]: DEBUG oslo_concurrency.lockutils [req-a2f38f20-3322-401e-97e1-df067cb7659d req-5a8bdff1-df34-43d0-82a8-8c1008316e9c service nova] Releasing lock "refresh_cache-13cdba63-5db4-419f-9e0b-244832d7866b" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1616.329583] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Releasing lock "refresh_cache-fae7e580-ab09-4fda-9cbe-0e066ddcb85c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1616.345404] env[62510]: DEBUG oslo_vmware.api [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52219371-875b-c283-6ffc-463de6397cdd, 'name': SearchDatastore_Task, 'duration_secs': 0.012398} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.345578] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1616.345859] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 0d27da5c-20f3-4df1-86d2-036c904fd657/0d27da5c-20f3-4df1-86d2-036c904fd657.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1616.346142] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0c71d778-a08f-4a58-ba4d-d1e64ce766c3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.354881] env[62510]: DEBUG oslo_vmware.api [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1616.354881] env[62510]: value = "task-1768784" [ 1616.354881] env[62510]: _type = "Task" [ 1616.354881] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.362956] env[62510]: DEBUG oslo_vmware.api [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768784, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.369033] env[62510]: DEBUG nova.network.neutron [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1616.495077] env[62510]: DEBUG nova.scheduler.client.report [None req-224361af-372b-499f-b410-55a610f161fd tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1616.651605] env[62510]: DEBUG oslo_vmware.api [None req-bf020cec-3ef6-4b06-86db-fe8d02d197f2 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768783, 'name': PowerOffVM_Task, 'duration_secs': 0.217272} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.651605] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf020cec-3ef6-4b06-86db-fe8d02d197f2 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1616.651605] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-bf020cec-3ef6-4b06-86db-fe8d02d197f2 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1616.651605] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-239ad5f4-7a42-430b-ba7f-2bdef4915029 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.678602] env[62510]: DEBUG oslo_concurrency.lockutils [req-67c293e9-e468-4505-8073-60f8e2dcb9aa req-96cf6367-14f6-45b8-82c1-c717f98191c9 service nova] Releasing lock "refresh_cache-0d27da5c-20f3-4df1-86d2-036c904fd657" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1616.678856] env[62510]: DEBUG nova.compute.manager [req-67c293e9-e468-4505-8073-60f8e2dcb9aa req-96cf6367-14f6-45b8-82c1-c717f98191c9 service nova] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Received event network-vif-deleted-246fe90c-d755-46fd-a256-e1f26ac76e09 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1616.680645] env[62510]: DEBUG nova.network.neutron [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Updating instance_info_cache with network_info: [{"id": 
"5550cd5d-e9b6-4414-a8e4-e7c6875d2399", "address": "fa:16:3e:c1:31:c9", "network": {"id": "e49618de-aacc-4b42-8a2e-7e2dc945a3b1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-883053645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b13a257970e4a9a9f9cfecaaf37d9da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5550cd5d-e9", "ovs_interfaceid": "5550cd5d-e9b6-4414-a8e4-e7c6875d2399", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1616.759299] env[62510]: DEBUG oslo_vmware.api [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768782, 'name': PowerOnVM_Task, 'duration_secs': 0.543511} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.759773] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1616.762831] env[62510]: INFO nova.compute.manager [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Took 7.71 seconds to spawn the instance on the hypervisor. 
[ 1616.762831] env[62510]: DEBUG nova.compute.manager [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1616.762831] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-775878f7-5167-4113-9726-8ce7cd54c832 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.780420] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-bf020cec-3ef6-4b06-86db-fe8d02d197f2 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1616.780650] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-bf020cec-3ef6-4b06-86db-fe8d02d197f2 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1616.780824] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf020cec-3ef6-4b06-86db-fe8d02d197f2 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Deleting the datastore file [datastore1] b5ff2a10-3c76-469a-86e0-ed3b135bca37 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1616.781131] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b06e99a9-5871-427d-a2ad-42982eb20804 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.791205] env[62510]: DEBUG oslo_vmware.api [None req-bf020cec-3ef6-4b06-86db-fe8d02d197f2 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1616.791205] env[62510]: value = "task-1768786" [ 1616.791205] env[62510]: _type = "Task" [ 1616.791205] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.802801] env[62510]: DEBUG oslo_vmware.api [None req-bf020cec-3ef6-4b06-86db-fe8d02d197f2 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768786, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.864634] env[62510]: DEBUG oslo_vmware.api [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768784, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.868669] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae87da05-ab5b-4ac1-a12b-47e4807433d0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.889390] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95243552-d184-4b29-8c4f-88a382c7a011 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.901253] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Updating instance 'fae7e580-ab09-4fda-9cbe-0e066ddcb85c' progress to 83 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1617.006129] env[62510]: DEBUG oslo_concurrency.lockutils [None req-224361af-372b-499f-b410-55a610f161fd tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.400s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1617.010393] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e5607298-fd60-476c-a05f-800c3a28fabe tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Acquiring lock "13cdba63-5db4-419f-9e0b-244832d7866b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1617.010393] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e5607298-fd60-476c-a05f-800c3a28fabe tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Lock "13cdba63-5db4-419f-9e0b-244832d7866b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1617.010393] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e5607298-fd60-476c-a05f-800c3a28fabe tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Acquiring lock "13cdba63-5db4-419f-9e0b-244832d7866b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1617.010393] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e5607298-fd60-476c-a05f-800c3a28fabe tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Lock "13cdba63-5db4-419f-9e0b-244832d7866b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1617.010393] env[62510]: DEBUG oslo_concurrency.lockutils [None 
req-e5607298-fd60-476c-a05f-800c3a28fabe tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Lock "13cdba63-5db4-419f-9e0b-244832d7866b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1617.011508] env[62510]: DEBUG oslo_concurrency.lockutils [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.025s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1617.013173] env[62510]: INFO nova.compute.claims [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1617.016172] env[62510]: INFO nova.compute.manager [None req-e5607298-fd60-476c-a05f-800c3a28fabe tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Terminating instance [ 1617.053862] env[62510]: INFO nova.scheduler.client.report [None req-224361af-372b-499f-b410-55a610f161fd tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Deleted allocations for instance 2c5c38c1-511f-4aae-969a-eb6de128fae7 [ 1617.184294] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Releasing lock "refresh_cache-f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1617.185348] env[62510]: DEBUG nova.compute.manager [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Instance network_info: |[{"id": "5550cd5d-e9b6-4414-a8e4-e7c6875d2399", "address": "fa:16:3e:c1:31:c9", "network": {"id": "e49618de-aacc-4b42-8a2e-7e2dc945a3b1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-883053645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b13a257970e4a9a9f9cfecaaf37d9da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5550cd5d-e9", "ovs_interfaceid": "5550cd5d-e9b6-4414-a8e4-e7c6875d2399", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": 
false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1617.186746] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c1:31:c9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73b1ea51-8078-4169-921e-d5a224120ab4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5550cd5d-e9b6-4414-a8e4-e7c6875d2399', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1617.199588] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Creating folder: Project (5b13a257970e4a9a9f9cfecaaf37d9da). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1617.200518] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dcde3e78-64b3-44b5-bbee-f835ceb1b552 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.215314] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Created folder: Project (5b13a257970e4a9a9f9cfecaaf37d9da) in parent group-v367197. [ 1617.216308] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Creating folder: Instances. Parent ref: group-v367348. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1617.216811] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d266c5cf-8a02-430b-8bb0-13f4d6bafc98 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.231531] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Created folder: Instances in parent group-v367348. [ 1617.232784] env[62510]: DEBUG oslo.service.loopingcall [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1617.233040] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1617.233273] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d376bc0e-320d-4ffe-8b94-cf9c1e94fa96 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.260519] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1617.260519] env[62510]: value = "task-1768789" [ 1617.260519] env[62510]: _type = "Task" [ 1617.260519] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.270382] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768789, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.273154] env[62510]: DEBUG nova.compute.manager [req-a7ca3aa0-b623-47aa-9b0f-d1292cd358ee req-e6e4f8df-1a41-480a-98ef-410a6c8a31f1 service nova] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Received event network-vif-plugged-5550cd5d-e9b6-4414-a8e4-e7c6875d2399 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1617.273366] env[62510]: DEBUG oslo_concurrency.lockutils [req-a7ca3aa0-b623-47aa-9b0f-d1292cd358ee req-e6e4f8df-1a41-480a-98ef-410a6c8a31f1 service nova] Acquiring lock "f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1617.273578] env[62510]: DEBUG oslo_concurrency.lockutils [req-a7ca3aa0-b623-47aa-9b0f-d1292cd358ee req-e6e4f8df-1a41-480a-98ef-410a6c8a31f1 service nova] Lock "f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1617.273745] env[62510]: DEBUG oslo_concurrency.lockutils [req-a7ca3aa0-b623-47aa-9b0f-d1292cd358ee req-e6e4f8df-1a41-480a-98ef-410a6c8a31f1 service nova] Lock "f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1617.275616] env[62510]: DEBUG nova.compute.manager [req-a7ca3aa0-b623-47aa-9b0f-d1292cd358ee req-e6e4f8df-1a41-480a-98ef-410a6c8a31f1 service nova] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] No waiting events found dispatching network-vif-plugged-5550cd5d-e9b6-4414-a8e4-e7c6875d2399 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1617.275616] env[62510]: WARNING nova.compute.manager [req-a7ca3aa0-b623-47aa-9b0f-d1292cd358ee req-e6e4f8df-1a41-480a-98ef-410a6c8a31f1 service nova] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Received unexpected event network-vif-plugged-5550cd5d-e9b6-4414-a8e4-e7c6875d2399 for instance with vm_state building and task_state spawning. 
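Note: the req-a7ca3aa0 records above trace Nova's external-event handling for port 5550cd5d-e9b6-4414-a8e4-e7c6875d2399: Neutron reports network-vif-plugged, the compute manager takes the per-instance "<uuid>-events" lock, tries to pop a registered waiter for the event (pop_instance_event / _pop_event), and logs the "Received unexpected event" warning when nothing is waiting because the instance is still building; the records that follow then refresh the network info cache for the same port. The sketch below is a stripped-down stand-in for that pop-under-lock bookkeeping, not Nova's actual InstanceEvents implementation; it uses threading.Event where Nova uses eventlet primitives, and the usage lines at the end are purely illustrative.

    # Simplified stand-in for the pop_instance_event/_pop_event bookkeeping
    # visible in the log: waiters are registered and popped under a
    # per-instance "<uuid>-events" lock via oslo.concurrency's lockutils.
    import threading

    from oslo_concurrency import lockutils


    class SimpleInstanceEvents(object):
        def __init__(self):
            # {instance_uuid: {event_name: threading.Event}}
            self._events = {}

        def prepare_for_instance_event(self, instance_uuid, event_name):
            """Register a waiter before kicking off the external action."""
            @lockutils.synchronized(instance_uuid + '-events')
            def _create():
                waiter = threading.Event()
                self._events.setdefault(instance_uuid, {})[event_name] = waiter
                return waiter
            return _create()

        def pop_instance_event(self, instance_uuid, event_name):
            """Return the waiter for an arriving event, or None if nobody waits."""
            @lockutils.synchronized(instance_uuid + '-events')
            def _pop_event():
                return self._events.get(instance_uuid, {}).pop(event_name, None)
            return _pop_event()


    # An event with no registered waiter is "unexpected" and only logged,
    # mirroring the WARNING above; a registered waiter would be set() instead.
    events = SimpleInstanceEvents()
    if events.pop_instance_event('f4c5c0ab', 'network-vif-plugged') is None:
        print('Received unexpected event network-vif-plugged')
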
[ 1617.275616] env[62510]: DEBUG nova.compute.manager [req-a7ca3aa0-b623-47aa-9b0f-d1292cd358ee req-e6e4f8df-1a41-480a-98ef-410a6c8a31f1 service nova] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Received event network-changed-5550cd5d-e9b6-4414-a8e4-e7c6875d2399 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1617.275616] env[62510]: DEBUG nova.compute.manager [req-a7ca3aa0-b623-47aa-9b0f-d1292cd358ee req-e6e4f8df-1a41-480a-98ef-410a6c8a31f1 service nova] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Refreshing instance network info cache due to event network-changed-5550cd5d-e9b6-4414-a8e4-e7c6875d2399. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1617.275616] env[62510]: DEBUG oslo_concurrency.lockutils [req-a7ca3aa0-b623-47aa-9b0f-d1292cd358ee req-e6e4f8df-1a41-480a-98ef-410a6c8a31f1 service nova] Acquiring lock "refresh_cache-f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1617.275616] env[62510]: DEBUG oslo_concurrency.lockutils [req-a7ca3aa0-b623-47aa-9b0f-d1292cd358ee req-e6e4f8df-1a41-480a-98ef-410a6c8a31f1 service nova] Acquired lock "refresh_cache-f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1617.275616] env[62510]: DEBUG nova.network.neutron [req-a7ca3aa0-b623-47aa-9b0f-d1292cd358ee req-e6e4f8df-1a41-480a-98ef-410a6c8a31f1 service nova] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Refreshing network info cache for port 5550cd5d-e9b6-4414-a8e4-e7c6875d2399 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1617.287044] env[62510]: INFO nova.compute.manager [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Took 41.00 seconds to build instance. [ 1617.303476] env[62510]: DEBUG oslo_vmware.api [None req-bf020cec-3ef6-4b06-86db-fe8d02d197f2 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768786, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.365854] env[62510]: DEBUG oslo_vmware.api [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768784, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.762117} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.365854] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 0d27da5c-20f3-4df1-86d2-036c904fd657/0d27da5c-20f3-4df1-86d2-036c904fd657.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1617.365991] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1617.366199] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-477bf670-7486-404a-9138-db2872f3422c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.375810] env[62510]: DEBUG oslo_vmware.api [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1617.375810] env[62510]: value = "task-1768790" [ 1617.375810] env[62510]: _type = "Task" [ 1617.375810] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.389258] env[62510]: DEBUG oslo_vmware.api [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768790, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.409037] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1617.409402] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-67387b34-9b97-4d81-a4e1-6ee4e22e7fc4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.421656] env[62510]: DEBUG oslo_vmware.api [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Waiting for the task: (returnval){ [ 1617.421656] env[62510]: value = "task-1768791" [ 1617.421656] env[62510]: _type = "Task" [ 1617.421656] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.435630] env[62510]: DEBUG oslo_vmware.api [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768791, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.526696] env[62510]: DEBUG nova.compute.manager [None req-e5607298-fd60-476c-a05f-800c3a28fabe tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1617.526696] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e5607298-fd60-476c-a05f-800c3a28fabe tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1617.527851] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a28abc6c-c636-49b7-b65b-a57f4d73267b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.554247] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5607298-fd60-476c-a05f-800c3a28fabe tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1617.554760] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e3f6d46f-c8b7-44cd-a56e-af9e2bc15941 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.567686] env[62510]: DEBUG oslo_vmware.api [None req-e5607298-fd60-476c-a05f-800c3a28fabe tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Waiting for the task: (returnval){ [ 1617.567686] env[62510]: value = "task-1768792" [ 1617.567686] env[62510]: _type = "Task" [ 1617.567686] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.568131] env[62510]: DEBUG oslo_concurrency.lockutils [None req-224361af-372b-499f-b410-55a610f161fd tempest-ServersTestFqdnHostnames-879349947 tempest-ServersTestFqdnHostnames-879349947-project-member] Lock "2c5c38c1-511f-4aae-969a-eb6de128fae7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.272s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1617.580625] env[62510]: DEBUG oslo_vmware.api [None req-e5607298-fd60-476c-a05f-800c3a28fabe tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Task: {'id': task-1768792, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.774409] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768789, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.789643] env[62510]: DEBUG oslo_concurrency.lockutils [None req-59a9f140-7131-4c51-8844-4e80906b6466 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "d3e25d50-f315-439b-9e9f-8e454a0631d4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.910s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1617.805297] env[62510]: DEBUG oslo_vmware.api [None req-bf020cec-3ef6-4b06-86db-fe8d02d197f2 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1768786, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.645874} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.805675] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf020cec-3ef6-4b06-86db-fe8d02d197f2 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1617.805946] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-bf020cec-3ef6-4b06-86db-fe8d02d197f2 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1617.806711] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-bf020cec-3ef6-4b06-86db-fe8d02d197f2 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1617.807102] env[62510]: INFO nova.compute.manager [None req-bf020cec-3ef6-4b06-86db-fe8d02d197f2 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Took 1.74 seconds to destroy the instance on the hypervisor. [ 1617.807387] env[62510]: DEBUG oslo.service.loopingcall [None req-bf020cec-3ef6-4b06-86db-fe8d02d197f2 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1617.807651] env[62510]: DEBUG nova.compute.manager [-] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1617.807885] env[62510]: DEBUG nova.network.neutron [-] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1617.888257] env[62510]: DEBUG oslo_vmware.api [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768790, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074634} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.888550] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1617.889411] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7fbcd27-6e1f-4d3c-9be6-80c275f0f695 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.919947] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] 0d27da5c-20f3-4df1-86d2-036c904fd657/0d27da5c-20f3-4df1-86d2-036c904fd657.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1617.922582] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3e759780-44c6-4313-9cc8-4a9e8261d73c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.952593] env[62510]: DEBUG oslo_vmware.api [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768791, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.954317] env[62510]: DEBUG oslo_vmware.api [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1617.954317] env[62510]: value = "task-1768793" [ 1617.954317] env[62510]: _type = "Task" [ 1617.954317] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.964986] env[62510]: DEBUG oslo_vmware.api [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768793, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.080505] env[62510]: DEBUG oslo_vmware.api [None req-e5607298-fd60-476c-a05f-800c3a28fabe tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Task: {'id': task-1768792, 'name': PowerOffVM_Task, 'duration_secs': 0.344061} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.080848] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5607298-fd60-476c-a05f-800c3a28fabe tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1618.081048] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e5607298-fd60-476c-a05f-800c3a28fabe tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1618.081331] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-13e02e7c-bfcc-4661-8bdc-d3d1121a60b7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.277494] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768789, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.337959] env[62510]: DEBUG nova.network.neutron [req-a7ca3aa0-b623-47aa-9b0f-d1292cd358ee req-e6e4f8df-1a41-480a-98ef-410a6c8a31f1 service nova] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Updated VIF entry in instance network info cache for port 5550cd5d-e9b6-4414-a8e4-e7c6875d2399. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1618.337959] env[62510]: DEBUG nova.network.neutron [req-a7ca3aa0-b623-47aa-9b0f-d1292cd358ee req-e6e4f8df-1a41-480a-98ef-410a6c8a31f1 service nova] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Updating instance_info_cache with network_info: [{"id": "5550cd5d-e9b6-4414-a8e4-e7c6875d2399", "address": "fa:16:3e:c1:31:c9", "network": {"id": "e49618de-aacc-4b42-8a2e-7e2dc945a3b1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-883053645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b13a257970e4a9a9f9cfecaaf37d9da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5550cd5d-e9", "ovs_interfaceid": "5550cd5d-e9b6-4414-a8e4-e7c6875d2399", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1618.435403] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e5607298-fd60-476c-a05f-800c3a28fabe tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] [instance: 
13cdba63-5db4-419f-9e0b-244832d7866b] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1618.435879] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e5607298-fd60-476c-a05f-800c3a28fabe tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1618.435946] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5607298-fd60-476c-a05f-800c3a28fabe tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Deleting the datastore file [datastore1] 13cdba63-5db4-419f-9e0b-244832d7866b {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1618.436229] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4a269a72-debb-4f83-9eb8-ba717af69fab {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.445669] env[62510]: DEBUG oslo_vmware.api [None req-e5607298-fd60-476c-a05f-800c3a28fabe tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Waiting for the task: (returnval){ [ 1618.445669] env[62510]: value = "task-1768795" [ 1618.445669] env[62510]: _type = "Task" [ 1618.445669] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.454537] env[62510]: DEBUG oslo_vmware.api [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768791, 'name': PowerOnVM_Task, 'duration_secs': 0.947697} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.455884] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1618.455954] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f636e921-0d7c-4fb3-8b89-e356d21cbd11 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Updating instance 'fae7e580-ab09-4fda-9cbe-0e066ddcb85c' progress to 100 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1618.470587] env[62510]: DEBUG oslo_vmware.api [None req-e5607298-fd60-476c-a05f-800c3a28fabe tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Task: {'id': task-1768795, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.480030] env[62510]: DEBUG oslo_vmware.api [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768793, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.540823] env[62510]: DEBUG nova.compute.manager [req-37a5b79a-1e9d-45f3-a371-68ef2ed278e3 req-bd3a80fd-e4a0-4eef-aba0-d89e65a77194 service nova] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Received event network-vif-deleted-6eb8d1eb-fc0d-41fd-a107-12a791bcd483 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1618.541180] env[62510]: INFO nova.compute.manager [req-37a5b79a-1e9d-45f3-a371-68ef2ed278e3 req-bd3a80fd-e4a0-4eef-aba0-d89e65a77194 service nova] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Neutron deleted interface 6eb8d1eb-fc0d-41fd-a107-12a791bcd483; detaching it from the instance and deleting it from the info cache [ 1618.542434] env[62510]: DEBUG nova.network.neutron [req-37a5b79a-1e9d-45f3-a371-68ef2ed278e3 req-bd3a80fd-e4a0-4eef-aba0-d89e65a77194 service nova] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1618.621670] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aa7454b-b798-4324-af11-ce22d4d90155 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.631910] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e06fbd57-1db1-4f1b-a756-546c9b61a667 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Suspending the VM {{(pid=62510) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1618.632474] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-34dfa2b9-43e2-489c-bf02-a6ca506cb01a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.644926] env[62510]: DEBUG oslo_vmware.api [None req-e06fbd57-1db1-4f1b-a756-546c9b61a667 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1618.644926] env[62510]: value = "task-1768796" [ 1618.644926] env[62510]: _type = "Task" [ 1618.644926] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.656992] env[62510]: DEBUG oslo_vmware.api [None req-e06fbd57-1db1-4f1b-a756-546c9b61a667 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768796, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.673998] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14f30cef-3727-4a55-a353-7862dc4bf3cc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.683317] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-877b80fd-141f-429a-bd05-678f63ffc595 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.728041] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a272cd8c-9f2a-4a84-8ca3-c95f787db2d4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.738706] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e2a03f7-c171-42cb-98ca-217e3ee609c9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.759374] env[62510]: DEBUG nova.compute.provider_tree [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1618.776172] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768789, 'name': CreateVM_Task, 'duration_secs': 1.169505} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.776443] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1618.777449] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1618.777714] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1618.778190] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1618.778901] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb78543d-ed19-47ca-afef-998b65a824dc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.792024] env[62510]: DEBUG oslo_vmware.api [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 1618.792024] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]529afa15-7978-9ad3-dc86-3b15d551b94b" [ 1618.792024] env[62510]: _type = "Task" [ 1618.792024] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.798949] env[62510]: DEBUG oslo_vmware.api [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]529afa15-7978-9ad3-dc86-3b15d551b94b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.813543] env[62510]: DEBUG nova.network.neutron [-] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1618.839753] env[62510]: DEBUG oslo_concurrency.lockutils [req-a7ca3aa0-b623-47aa-9b0f-d1292cd358ee req-e6e4f8df-1a41-480a-98ef-410a6c8a31f1 service nova] Releasing lock "refresh_cache-f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1618.960544] env[62510]: DEBUG oslo_vmware.api [None req-e5607298-fd60-476c-a05f-800c3a28fabe tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Task: {'id': task-1768795, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.289202} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.966301] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5607298-fd60-476c-a05f-800c3a28fabe tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1618.966598] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e5607298-fd60-476c-a05f-800c3a28fabe tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1618.966765] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e5607298-fd60-476c-a05f-800c3a28fabe tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1618.967400] env[62510]: INFO nova.compute.manager [None req-e5607298-fd60-476c-a05f-800c3a28fabe tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Took 1.44 seconds to destroy the instance on the hypervisor. [ 1618.967503] env[62510]: DEBUG oslo.service.loopingcall [None req-e5607298-fd60-476c-a05f-800c3a28fabe tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1618.967833] env[62510]: DEBUG nova.compute.manager [-] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1618.968185] env[62510]: DEBUG nova.network.neutron [-] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1618.981281] env[62510]: DEBUG oslo_vmware.api [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768793, 'name': ReconfigVM_Task, 'duration_secs': 0.642419} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.983167] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Reconfigured VM instance instance-0000003a to attach disk [datastore1] 0d27da5c-20f3-4df1-86d2-036c904fd657/0d27da5c-20f3-4df1-86d2-036c904fd657.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1618.983167] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1166cfc3-de23-4280-85ac-2563ffaac24b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.996236] env[62510]: DEBUG oslo_vmware.api [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1618.996236] env[62510]: value = "task-1768797" [ 1618.996236] env[62510]: _type = "Task" [ 1618.996236] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.013899] env[62510]: DEBUG oslo_vmware.api [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768797, 'name': Rename_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.044111] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4082c414-bf38-47ee-876e-446f85d58dcc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.055659] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4833bec5-1f57-45f9-90a0-6672f17e1fc8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.103178] env[62510]: DEBUG nova.compute.manager [req-37a5b79a-1e9d-45f3-a371-68ef2ed278e3 req-bd3a80fd-e4a0-4eef-aba0-d89e65a77194 service nova] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Detach interface failed, port_id=6eb8d1eb-fc0d-41fd-a107-12a791bcd483, reason: Instance b5ff2a10-3c76-469a-86e0-ed3b135bca37 could not be found. 
{{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1619.156118] env[62510]: DEBUG oslo_vmware.api [None req-e06fbd57-1db1-4f1b-a756-546c9b61a667 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768796, 'name': SuspendVM_Task} progress is 58%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.285487] env[62510]: ERROR nova.scheduler.client.report [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [req-258a50cd-2942-4bc3-b68a-a0f75241ace1] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c3653102-341b-4ed1-8b1f-1abaf8aa3e56. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-258a50cd-2942-4bc3-b68a-a0f75241ace1"}]} [ 1619.304214] env[62510]: DEBUG oslo_vmware.api [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]529afa15-7978-9ad3-dc86-3b15d551b94b, 'name': SearchDatastore_Task, 'duration_secs': 0.06995} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.306502] env[62510]: DEBUG nova.scheduler.client.report [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Refreshing inventories for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1619.310471] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1619.310794] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1619.311073] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1619.311429] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1619.311753] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1619.315325] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1c06905c-6384-4977-877d-55d6dee54834 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.316412] env[62510]: INFO nova.compute.manager [-] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Took 1.51 seconds to deallocate network for instance. [ 1619.326733] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1619.327018] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1619.328019] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4638307b-d84a-4074-8d30-d2d39e36e952 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.333422] env[62510]: DEBUG nova.scheduler.client.report [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Updating ProviderTree inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1619.333732] env[62510]: DEBUG nova.compute.provider_tree [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1619.337687] env[62510]: DEBUG oslo_vmware.api [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 1619.337687] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52d90331-8908-d3ad-f0aa-88b568b61d89" [ 1619.337687] env[62510]: _type = "Task" [ 1619.337687] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.353347] env[62510]: DEBUG oslo_vmware.api [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52d90331-8908-d3ad-f0aa-88b568b61d89, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.356978] env[62510]: DEBUG nova.scheduler.client.report [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Refreshing aggregate associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, aggregates: None {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1619.384672] env[62510]: DEBUG nova.scheduler.client.report [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Refreshing trait associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1619.509528] env[62510]: DEBUG oslo_vmware.api [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768797, 'name': Rename_Task, 'duration_secs': 0.377598} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.512739] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1619.513399] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4154d073-3d81-4eca-865c-36cb95e9f5cf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.522256] env[62510]: DEBUG oslo_vmware.api [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1619.522256] env[62510]: value = "task-1768798" [ 1619.522256] env[62510]: _type = "Task" [ 1619.522256] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.546948] env[62510]: DEBUG oslo_vmware.api [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768798, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.655966] env[62510]: DEBUG oslo_vmware.api [None req-e06fbd57-1db1-4f1b-a756-546c9b61a667 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768796, 'name': SuspendVM_Task, 'duration_secs': 1.003444} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.659367] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e06fbd57-1db1-4f1b-a756-546c9b61a667 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Suspended the VM {{(pid=62510) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1619.659580] env[62510]: DEBUG nova.compute.manager [None req-e06fbd57-1db1-4f1b-a756-546c9b61a667 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1619.660645] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49007223-9427-407e-bd1f-ff14032f096e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.824941] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bf020cec-3ef6-4b06-86db-fe8d02d197f2 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1619.848909] env[62510]: DEBUG oslo_vmware.api [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52d90331-8908-d3ad-f0aa-88b568b61d89, 'name': SearchDatastore_Task, 'duration_secs': 0.01829} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.852804] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2226a146-79b2-42e9-b4a4-ca56ba3c6601 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.859953] env[62510]: DEBUG oslo_vmware.api [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 1619.859953] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52e9c6cf-2747-4ac7-6492-0f1d194c6a87" [ 1619.859953] env[62510]: _type = "Task" [ 1619.859953] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.870194] env[62510]: DEBUG oslo_vmware.api [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52e9c6cf-2747-4ac7-6492-0f1d194c6a87, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.934027] env[62510]: DEBUG nova.network.neutron [-] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1619.937785] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e939e3ec-dfe1-4f2d-86aa-ca20346f40aa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.948985] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0804b1e-5678-4398-b25e-5740676e5d95 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.999631] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4a25843-b737-4b47-944b-f97150ab6afc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.002778] env[62510]: DEBUG nova.compute.manager [req-24376c9b-7c50-42d3-a36e-47cc4a76d9d9 req-211aefbc-5bba-4595-8af7-902d5c1134ee service nova] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Received event network-vif-deleted-30661ae8-22f8-4f9e-91d9-67d7a31e134c {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1620.012368] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8f69717-a20e-42a8-8d2f-0fd818a8cb61 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.029027] env[62510]: DEBUG nova.compute.provider_tree [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1620.040790] env[62510]: DEBUG oslo_vmware.api [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768798, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.376829] env[62510]: DEBUG oslo_vmware.api [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52e9c6cf-2747-4ac7-6492-0f1d194c6a87, 'name': SearchDatastore_Task, 'duration_secs': 0.036866} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.377164] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1620.377405] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095/f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1620.377688] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e04a93b3-0e96-4a63-9271-1d4511799208 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.387410] env[62510]: DEBUG oslo_vmware.api [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 1620.387410] env[62510]: value = "task-1768799" [ 1620.387410] env[62510]: _type = "Task" [ 1620.387410] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.396753] env[62510]: DEBUG oslo_vmware.api [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1768799, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.434404] env[62510]: INFO nova.compute.manager [-] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Took 1.47 seconds to deallocate network for instance. [ 1620.552027] env[62510]: DEBUG oslo_vmware.api [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768798, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.568432] env[62510]: ERROR nova.scheduler.client.report [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [req-08cab47c-1fc9-4d9f-8182-5925aff37085] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c3653102-341b-4ed1-8b1f-1abaf8aa3e56. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-08cab47c-1fc9-4d9f-8182-5925aff37085"}]} [ 1620.575560] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "241d842d-3dd5-4ac2-a18a-12b9c9fbd340" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1620.575879] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "241d842d-3dd5-4ac2-a18a-12b9c9fbd340" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1620.597050] env[62510]: DEBUG nova.scheduler.client.report [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Refreshing inventories for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1620.619141] env[62510]: DEBUG nova.scheduler.client.report [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Updating ProviderTree inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1620.619439] env[62510]: DEBUG nova.compute.provider_tree [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1620.640740] env[62510]: DEBUG nova.scheduler.client.report [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Refreshing aggregate associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, aggregates: None {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1620.672040] 
env[62510]: DEBUG nova.scheduler.client.report [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Refreshing trait associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1620.902023] env[62510]: DEBUG oslo_vmware.api [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1768799, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.941643] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e5607298-fd60-476c-a05f-800c3a28fabe tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1621.044860] env[62510]: DEBUG oslo_vmware.api [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768798, 'name': PowerOnVM_Task, 'duration_secs': 1.039842} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.049026] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1621.049302] env[62510]: INFO nova.compute.manager [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Took 9.37 seconds to spawn the instance on the hypervisor. [ 1621.049504] env[62510]: DEBUG nova.compute.manager [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1621.050456] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63c5d7f6-27c4-4bbe-b47d-cd5d3a26aefb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.079393] env[62510]: DEBUG nova.compute.manager [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1621.265365] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48f1d870-66ed-4828-8eee-b4964482b9a6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.276214] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f65ad64-db6e-42e5-a043-a40f64659c09 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.312471] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c56c2daa-728a-4131-8351-7e2c83f925d7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.322132] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d592372a-4b64-4362-9242-d6f17e217d0b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.342752] env[62510]: DEBUG nova.compute.provider_tree [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1621.400691] env[62510]: DEBUG oslo_vmware.api [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1768799, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.565862} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.400691] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095/f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1621.400691] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1621.401084] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-97e3a50c-e2f6-4e23-a880-52638d10096a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.411056] env[62510]: DEBUG oslo_vmware.api [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 1621.411056] env[62510]: value = "task-1768800" [ 1621.411056] env[62510]: _type = "Task" [ 1621.411056] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.422471] env[62510]: DEBUG oslo_vmware.api [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1768800, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.576320] env[62510]: INFO nova.compute.manager [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Took 36.79 seconds to build instance. [ 1621.610161] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1621.848729] env[62510]: DEBUG nova.scheduler.client.report [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1621.922669] env[62510]: DEBUG oslo_vmware.api [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1768800, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.129653} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.923040] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1621.923859] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-030d419d-cb95-447f-bd10-2eaa00bec65a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.951168] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095/f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1621.951502] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f3292aa7-876c-484f-b276-025f090ecf07 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.974196] env[62510]: DEBUG oslo_vmware.api [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 1621.974196] env[62510]: value = "task-1768801" [ 1621.974196] env[62510]: _type = "Task" [ 1621.974196] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.982545] env[62510]: DEBUG oslo_vmware.api [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1768801, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.996913] env[62510]: DEBUG oslo_concurrency.lockutils [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Acquiring lock "31fe5643-dece-484f-92d6-7c7cafbd51e4" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1621.997176] env[62510]: DEBUG oslo_concurrency.lockutils [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Lock "31fe5643-dece-484f-92d6-7c7cafbd51e4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1622.078478] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dfff32ba-b70b-42db-a979-837943df7c27 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "0d27da5c-20f3-4df1-86d2-036c904fd657" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 60.116s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1622.177544] env[62510]: DEBUG nova.network.neutron [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Port 21d515b9-b00f-45cc-9437-318ee6bba755 binding to destination host cpu-1 is already ACTIVE {{(pid=62510) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1622.177544] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquiring lock "refresh_cache-fae7e580-ab09-4fda-9cbe-0e066ddcb85c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1622.177544] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquired lock "refresh_cache-fae7e580-ab09-4fda-9cbe-0e066ddcb85c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1622.177544] env[62510]: DEBUG nova.network.neutron [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1622.358515] env[62510]: DEBUG oslo_concurrency.lockutils [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 5.347s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1622.358851] 
env[62510]: DEBUG nova.compute.manager [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1622.361941] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 28.384s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1622.417069] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7c3409e5-1124-45f4-b64b-e81e8d88f13a tempest-ServersAdminTestJSON-232086773 tempest-ServersAdminTestJSON-232086773-project-admin] Acquiring lock "refresh_cache-0d27da5c-20f3-4df1-86d2-036c904fd657" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1622.417069] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7c3409e5-1124-45f4-b64b-e81e8d88f13a tempest-ServersAdminTestJSON-232086773 tempest-ServersAdminTestJSON-232086773-project-admin] Acquired lock "refresh_cache-0d27da5c-20f3-4df1-86d2-036c904fd657" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1622.417069] env[62510]: DEBUG nova.network.neutron [None req-7c3409e5-1124-45f4-b64b-e81e8d88f13a tempest-ServersAdminTestJSON-232086773 tempest-ServersAdminTestJSON-232086773-project-admin] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1622.488955] env[62510]: DEBUG oslo_vmware.api [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1768801, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.500866] env[62510]: DEBUG nova.compute.manager [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1622.520612] env[62510]: DEBUG nova.compute.manager [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1622.521559] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-362da2d6-e9a0-47a5-a051-e16cb91bc795 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.868853] env[62510]: DEBUG nova.compute.utils [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1622.873709] env[62510]: INFO nova.compute.claims [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1622.883024] env[62510]: DEBUG nova.compute.manager [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1622.883024] env[62510]: DEBUG nova.network.neutron [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1622.923561] env[62510]: DEBUG nova.network.neutron [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Updating instance_info_cache with network_info: [{"id": "21d515b9-b00f-45cc-9437-318ee6bba755", "address": "fa:16:3e:66:30:96", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.170", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21d515b9-b0", "ovs_interfaceid": "21d515b9-b00f-45cc-9437-318ee6bba755", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1622.991575] env[62510]: DEBUG oslo_vmware.api [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1768801, 'name': ReconfigVM_Task, 'duration_secs': 0.717101} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.991575] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Reconfigured VM instance instance-0000003b to attach disk [datastore1] f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095/f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1622.991575] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-769a063f-71c5-45a6-a3ce-d0a21cca8da5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.000102] env[62510]: DEBUG oslo_vmware.api [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 1623.000102] env[62510]: value = "task-1768802" [ 1623.000102] env[62510]: _type = "Task" [ 1623.000102] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1623.014602] env[62510]: DEBUG oslo_vmware.api [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1768802, 'name': Rename_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.019335] env[62510]: DEBUG nova.policy [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '29251dc4243a44669bae0609008a88e9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3f85ce3c02964d36a77221ba8235978c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1623.038472] env[62510]: DEBUG oslo_concurrency.lockutils [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1623.047920] env[62510]: INFO nova.compute.manager [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] instance snapshotting [ 1623.048986] env[62510]: WARNING nova.compute.manager [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 1623.052112] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52af6726-2b54-4064-8bc0-d09b6f3289b4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.074803] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-071b2a2f-3f69-4af8-a32a-60fe8691bbf9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.159282] env[62510]: DEBUG nova.network.neutron [None req-7c3409e5-1124-45f4-b64b-e81e8d88f13a tempest-ServersAdminTestJSON-232086773 tempest-ServersAdminTestJSON-232086773-project-admin] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Updating instance_info_cache with network_info: [{"id": "648008fa-8f2b-4cb2-a911-200874a59cc0", "address": "fa:16:3e:1d:0c:fd", "network": {"id": "f122ba1e-a858-4704-b83d-f76156f060fc", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1645239499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e39188afd4e94f01a5b3f1ec78cf70e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", 
"segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap648008fa-8f", "ovs_interfaceid": "648008fa-8f2b-4cb2-a911-200874a59cc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1623.378744] env[62510]: DEBUG nova.compute.manager [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1623.389220] env[62510]: INFO nova.compute.resource_tracker [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Updating resource usage from migration 09ab0c5c-8b35-4fbb-82b3-775e36c61415 [ 1623.429439] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Releasing lock "refresh_cache-fae7e580-ab09-4fda-9cbe-0e066ddcb85c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1623.513809] env[62510]: DEBUG oslo_vmware.api [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1768802, 'name': Rename_Task, 'duration_secs': 0.19002} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1623.516254] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1623.516254] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-baa5c4af-0e69-44ac-b6ff-f43ccb7f63f7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.524018] env[62510]: DEBUG oslo_vmware.api [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 1623.524018] env[62510]: value = "task-1768803" [ 1623.524018] env[62510]: _type = "Task" [ 1623.524018] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1623.534728] env[62510]: DEBUG oslo_vmware.api [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1768803, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.590636] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Creating Snapshot of the VM instance {{(pid=62510) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1623.591074] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-61fc7d3e-b793-41c3-9b65-aa2bb397d431 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.602435] env[62510]: DEBUG oslo_vmware.api [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1623.602435] env[62510]: value = "task-1768804" [ 1623.602435] env[62510]: _type = "Task" [ 1623.602435] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1623.612581] env[62510]: DEBUG oslo_vmware.api [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768804, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.666019] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7c3409e5-1124-45f4-b64b-e81e8d88f13a tempest-ServersAdminTestJSON-232086773 tempest-ServersAdminTestJSON-232086773-project-admin] Releasing lock "refresh_cache-0d27da5c-20f3-4df1-86d2-036c904fd657" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1623.666019] env[62510]: DEBUG nova.compute.manager [None req-7c3409e5-1124-45f4-b64b-e81e8d88f13a tempest-ServersAdminTestJSON-232086773 tempest-ServersAdminTestJSON-232086773-project-admin] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Inject network info {{(pid=62510) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7600}} [ 1623.666019] env[62510]: DEBUG nova.compute.manager [None req-7c3409e5-1124-45f4-b64b-e81e8d88f13a tempest-ServersAdminTestJSON-232086773 tempest-ServersAdminTestJSON-232086773-project-admin] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] network_info to inject: |[{"id": "648008fa-8f2b-4cb2-a911-200874a59cc0", "address": "fa:16:3e:1d:0c:fd", "network": {"id": "f122ba1e-a858-4704-b83d-f76156f060fc", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1645239499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e39188afd4e94f01a5b3f1ec78cf70e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap648008fa-8f", "ovs_interfaceid": "648008fa-8f2b-4cb2-a911-200874a59cc0", "qbh_params": 
null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7601}} [ 1623.670223] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-7c3409e5-1124-45f4-b64b-e81e8d88f13a tempest-ServersAdminTestJSON-232086773 tempest-ServersAdminTestJSON-232086773-project-admin] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Reconfiguring VM instance to set the machine id {{(pid=62510) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1623.673126] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2e26bc54-9dcf-4576-aa7a-c52f48f3f16f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.694023] env[62510]: DEBUG oslo_vmware.api [None req-7c3409e5-1124-45f4-b64b-e81e8d88f13a tempest-ServersAdminTestJSON-232086773 tempest-ServersAdminTestJSON-232086773-project-admin] Waiting for the task: (returnval){ [ 1623.694023] env[62510]: value = "task-1768805" [ 1623.694023] env[62510]: _type = "Task" [ 1623.694023] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1623.704703] env[62510]: DEBUG oslo_vmware.api [None req-7c3409e5-1124-45f4-b64b-e81e8d88f13a tempest-ServersAdminTestJSON-232086773 tempest-ServersAdminTestJSON-232086773-project-admin] Task: {'id': task-1768805, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.847473] env[62510]: DEBUG nova.network.neutron [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Successfully created port: 6e3b34de-9819-4bbf-8565-8fd4f61417d3 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1623.934920] env[62510]: DEBUG nova.compute.manager [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62510) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1623.935158] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1624.003777] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37776316-ed68-470d-813b-21e323fbfbdc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.013237] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5652d561-de1a-434e-a7d4-a8a89e23c1d9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.050271] env[62510]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa44923f-b8e9-49e7-ac15-74539afe7da5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.062930] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82e0c433-3c52-4f4c-9861-d3e9059be3f3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.067117] env[62510]: DEBUG oslo_vmware.api [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1768803, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.078406] env[62510]: DEBUG nova.compute.provider_tree [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1624.113488] env[62510]: DEBUG oslo_vmware.api [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768804, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.203453] env[62510]: DEBUG oslo_vmware.api [None req-7c3409e5-1124-45f4-b64b-e81e8d88f13a tempest-ServersAdminTestJSON-232086773 tempest-ServersAdminTestJSON-232086773-project-admin] Task: {'id': task-1768805, 'name': ReconfigVM_Task, 'duration_secs': 0.191186} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1624.203749] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-7c3409e5-1124-45f4-b64b-e81e8d88f13a tempest-ServersAdminTestJSON-232086773 tempest-ServersAdminTestJSON-232086773-project-admin] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Reconfigured VM instance to set the machine id {{(pid=62510) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1624.388591] env[62510]: DEBUG nova.compute.manager [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1624.422838] env[62510]: DEBUG nova.virt.hardware [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1624.423125] env[62510]: DEBUG nova.virt.hardware [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1624.423290] env[62510]: DEBUG nova.virt.hardware [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1624.423474] env[62510]: DEBUG nova.virt.hardware [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1624.423705] env[62510]: DEBUG nova.virt.hardware [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1624.423860] env[62510]: DEBUG nova.virt.hardware [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1624.424096] env[62510]: DEBUG nova.virt.hardware [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1624.424269] env[62510]: DEBUG nova.virt.hardware [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1624.424437] env[62510]: DEBUG 
nova.virt.hardware [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1624.424603] env[62510]: DEBUG nova.virt.hardware [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1624.424776] env[62510]: DEBUG nova.virt.hardware [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1624.425754] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-606d4cd9-0557-4c53-86c9-01792ea754e4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.434685] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4de821e4-59e1-4fbc-b3da-296e3b730b42 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.555709] env[62510]: DEBUG oslo_vmware.api [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1768803, 'name': PowerOnVM_Task, 'duration_secs': 0.782527} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1624.556036] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1624.556256] env[62510]: INFO nova.compute.manager [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Took 10.12 seconds to spawn the instance on the hypervisor. 
[ 1624.556458] env[62510]: DEBUG nova.compute.manager [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1624.557260] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa5d1467-a28c-49f5-9c02-819aa6f17f3a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.581750] env[62510]: DEBUG nova.scheduler.client.report [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1624.615018] env[62510]: DEBUG oslo_vmware.api [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768804, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.079933] env[62510]: INFO nova.compute.manager [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Took 39.36 seconds to build instance. 
[ 1625.088477] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.726s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1625.088567] env[62510]: INFO nova.compute.manager [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Migrating [ 1625.096799] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dc36641a-cd6d-4db0-810b-e1128fe0d78f tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.302s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1625.097458] env[62510]: DEBUG nova.objects.instance [None req-dc36641a-cd6d-4db0-810b-e1128fe0d78f tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Lazy-loading 'resources' on Instance uuid bd21dd81-c0d9-4ff1-9183-0b4622dc5afb {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1625.128315] env[62510]: DEBUG oslo_vmware.api [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768804, 'name': CreateSnapshot_Task, 'duration_secs': 1.113381} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1625.128315] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Created Snapshot of the VM instance {{(pid=62510) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1625.128315] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b0b0516-156b-48e4-9d6e-f0acaff5b1ea {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.583179] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d308239c-c4c1-4c62-848d-71d88566e2a3 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.647s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1625.619159] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "refresh_cache-350d5f83-d9ce-4997-bf57-70c4a4e22ba0" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1625.619352] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquired lock "refresh_cache-350d5f83-d9ce-4997-bf57-70c4a4e22ba0" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1625.619556] env[62510]: DEBUG nova.network.neutron [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1625.649222] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Creating linked-clone VM from snapshot {{(pid=62510) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1625.652535] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-a8b08c9e-5c0c-4b0f-9d6c-da2c9efa1d2c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.662687] env[62510]: DEBUG oslo_vmware.api [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1625.662687] env[62510]: value = "task-1768806" [ 1625.662687] env[62510]: _type = "Task" [ 1625.662687] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.673186] env[62510]: DEBUG oslo_vmware.api [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768806, 'name': CloneVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.171428] env[62510]: DEBUG nova.compute.manager [req-e7897604-57e0-4df9-ae89-3000209eac3b req-343b93e4-d8a2-492f-92df-46550df40b67 service nova] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Received event network-vif-plugged-6e3b34de-9819-4bbf-8565-8fd4f61417d3 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1626.171696] env[62510]: DEBUG oslo_concurrency.lockutils [req-e7897604-57e0-4df9-ae89-3000209eac3b req-343b93e4-d8a2-492f-92df-46550df40b67 service nova] Acquiring lock "3df19233-2448-4030-ae1d-a4f98ccffba9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1626.172341] env[62510]: DEBUG oslo_concurrency.lockutils [req-e7897604-57e0-4df9-ae89-3000209eac3b req-343b93e4-d8a2-492f-92df-46550df40b67 service nova] Lock "3df19233-2448-4030-ae1d-a4f98ccffba9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1626.172468] env[62510]: DEBUG oslo_concurrency.lockutils [req-e7897604-57e0-4df9-ae89-3000209eac3b req-343b93e4-d8a2-492f-92df-46550df40b67 service nova] Lock "3df19233-2448-4030-ae1d-a4f98ccffba9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1626.172658] env[62510]: DEBUG nova.compute.manager [req-e7897604-57e0-4df9-ae89-3000209eac3b req-343b93e4-d8a2-492f-92df-46550df40b67 service nova] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] No waiting events found dispatching network-vif-plugged-6e3b34de-9819-4bbf-8565-8fd4f61417d3 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1626.172845] env[62510]: WARNING nova.compute.manager [req-e7897604-57e0-4df9-ae89-3000209eac3b req-343b93e4-d8a2-492f-92df-46550df40b67 service nova] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Received unexpected event network-vif-plugged-6e3b34de-9819-4bbf-8565-8fd4f61417d3 for instance with vm_state building and task_state spawning. [ 1626.183974] env[62510]: DEBUG oslo_vmware.api [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768806, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.230306] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7b3d7cf-cbce-4711-9ab9-5c86adced3c3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.245028] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43bb7f07-cd06-4cb7-a20a-359a4ec7809c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.288476] env[62510]: DEBUG nova.network.neutron [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Successfully updated port: 6e3b34de-9819-4bbf-8565-8fd4f61417d3 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1626.288476] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eebb7d6-8be0-49fe-bc74-5cbee4b1ff7b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.299192] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-381b944c-8d5c-4432-8318-890c118e057e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.315568] env[62510]: DEBUG nova.compute.provider_tree [None req-dc36641a-cd6d-4db0-810b-e1128fe0d78f tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1626.515842] env[62510]: DEBUG nova.compute.manager [req-58ff1e56-bc8c-48f5-a91f-82255190bc07 req-331eaacb-53ec-4d6b-ac87-c4d2cf0351ee service nova] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Received event network-changed-5550cd5d-e9b6-4414-a8e4-e7c6875d2399 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1626.515842] env[62510]: DEBUG nova.compute.manager [req-58ff1e56-bc8c-48f5-a91f-82255190bc07 req-331eaacb-53ec-4d6b-ac87-c4d2cf0351ee service nova] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Refreshing instance network info cache due to event network-changed-5550cd5d-e9b6-4414-a8e4-e7c6875d2399. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1626.515842] env[62510]: DEBUG oslo_concurrency.lockutils [req-58ff1e56-bc8c-48f5-a91f-82255190bc07 req-331eaacb-53ec-4d6b-ac87-c4d2cf0351ee service nova] Acquiring lock "refresh_cache-f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1626.515975] env[62510]: DEBUG oslo_concurrency.lockutils [req-58ff1e56-bc8c-48f5-a91f-82255190bc07 req-331eaacb-53ec-4d6b-ac87-c4d2cf0351ee service nova] Acquired lock "refresh_cache-f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1626.517441] env[62510]: DEBUG nova.network.neutron [req-58ff1e56-bc8c-48f5-a91f-82255190bc07 req-331eaacb-53ec-4d6b-ac87-c4d2cf0351ee service nova] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Refreshing network info cache for port 5550cd5d-e9b6-4414-a8e4-e7c6875d2399 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1626.550377] env[62510]: DEBUG nova.network.neutron [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Updating instance_info_cache with network_info: [{"id": "8597503b-d757-44ff-91a7-6f52b3b75aa3", "address": "fa:16:3e:bd:11:3e", "network": {"id": "bf59f5d9-5154-4120-9edd-03529b552382", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2003015829-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e144c0bd2d124193a65ad53de8c43039", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8597503b-d7", "ovs_interfaceid": "8597503b-d757-44ff-91a7-6f52b3b75aa3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1626.675358] env[62510]: DEBUG oslo_vmware.api [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768806, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.793620] env[62510]: DEBUG oslo_concurrency.lockutils [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Acquiring lock "refresh_cache-3df19233-2448-4030-ae1d-a4f98ccffba9" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1626.793902] env[62510]: DEBUG oslo_concurrency.lockutils [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Acquired lock "refresh_cache-3df19233-2448-4030-ae1d-a4f98ccffba9" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1626.794087] env[62510]: DEBUG nova.network.neutron [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1626.821896] env[62510]: DEBUG nova.scheduler.client.report [None req-dc36641a-cd6d-4db0-810b-e1128fe0d78f tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1627.057786] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Releasing lock "refresh_cache-350d5f83-d9ce-4997-bf57-70c4a4e22ba0" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1627.064792] env[62510]: INFO nova.compute.manager [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Rebuilding instance [ 1627.111726] env[62510]: DEBUG nova.compute.manager [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1627.112991] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea2c0e4b-4bf8-4370-beb9-aa2236adc54f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.177262] env[62510]: DEBUG oslo_vmware.api [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768806, 'name': CloneVM_Task, 'duration_secs': 1.278593} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.177532] env[62510]: INFO nova.virt.vmwareapi.vmops [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Created linked-clone VM from snapshot [ 1627.178623] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b02cd25a-a8c5-4334-829f-550dd2a80497 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.187715] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Uploading image ad780d0f-1ac1-44e1-9cf6-81c91b73810c {{(pid=62510) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1627.216176] env[62510]: DEBUG oslo_vmware.rw_handles [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1627.216176] env[62510]: value = "vm-367352" [ 1627.216176] env[62510]: _type = "VirtualMachine" [ 1627.216176] env[62510]: }. {{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1627.216471] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-d67f5e07-b3a1-4bd0-b354-369492b97cde {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.224861] env[62510]: DEBUG oslo_vmware.rw_handles [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lease: (returnval){ [ 1627.224861] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52eff64c-6201-be37-42ce-b0bf88846800" [ 1627.224861] env[62510]: _type = "HttpNfcLease" [ 1627.224861] env[62510]: } obtained for exporting VM: (result){ [ 1627.224861] env[62510]: value = "vm-367352" [ 1627.224861] env[62510]: _type = "VirtualMachine" [ 1627.224861] env[62510]: }. {{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1627.225383] env[62510]: DEBUG oslo_vmware.api [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the lease: (returnval){ [ 1627.225383] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52eff64c-6201-be37-42ce-b0bf88846800" [ 1627.225383] env[62510]: _type = "HttpNfcLease" [ 1627.225383] env[62510]: } to be ready. {{(pid=62510) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1627.234549] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1627.234549] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52eff64c-6201-be37-42ce-b0bf88846800" [ 1627.234549] env[62510]: _type = "HttpNfcLease" [ 1627.234549] env[62510]: } is initializing. 
{{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1627.327583] env[62510]: DEBUG nova.network.neutron [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1627.331239] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dc36641a-cd6d-4db0-810b-e1128fe0d78f tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.233s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1627.331751] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3b9a889c-1523-4f8a-954c-799bb67b2b97 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.123s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1627.333876] env[62510]: DEBUG nova.objects.instance [None req-3b9a889c-1523-4f8a-954c-799bb67b2b97 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Lazy-loading 'resources' on Instance uuid b7c2c768-573b-4c1c-ade7-45fb87b95d41 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1627.360842] env[62510]: INFO nova.scheduler.client.report [None req-dc36641a-cd6d-4db0-810b-e1128fe0d78f tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Deleted allocations for instance bd21dd81-c0d9-4ff1-9183-0b4622dc5afb [ 1627.401855] env[62510]: DEBUG nova.network.neutron [req-58ff1e56-bc8c-48f5-a91f-82255190bc07 req-331eaacb-53ec-4d6b-ac87-c4d2cf0351ee service nova] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Updated VIF entry in instance network info cache for port 5550cd5d-e9b6-4414-a8e4-e7c6875d2399. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1627.402958] env[62510]: DEBUG nova.network.neutron [req-58ff1e56-bc8c-48f5-a91f-82255190bc07 req-331eaacb-53ec-4d6b-ac87-c4d2cf0351ee service nova] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Updating instance_info_cache with network_info: [{"id": "5550cd5d-e9b6-4414-a8e4-e7c6875d2399", "address": "fa:16:3e:c1:31:c9", "network": {"id": "e49618de-aacc-4b42-8a2e-7e2dc945a3b1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-883053645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b13a257970e4a9a9f9cfecaaf37d9da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5550cd5d-e9", "ovs_interfaceid": "5550cd5d-e9b6-4414-a8e4-e7c6875d2399", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1627.636624] env[62510]: DEBUG nova.network.neutron [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Updating instance_info_cache with network_info: [{"id": "6e3b34de-9819-4bbf-8565-8fd4f61417d3", "address": "fa:16:3e:bd:e1:31", "network": {"id": "259e31c4-74f6-4d58-9f76-c7b34d594473", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1218880601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f85ce3c02964d36a77221ba8235978c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e85cbc56-fee0-41f7-bc70-64f31775ce92", "external-id": "nsx-vlan-transportzone-793", "segmentation_id": 793, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e3b34de-98", "ovs_interfaceid": "6e3b34de-9819-4bbf-8565-8fd4f61417d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1627.733720] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1627.733720] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52eff64c-6201-be37-42ce-b0bf88846800" [ 1627.733720] env[62510]: _type = "HttpNfcLease" [ 1627.733720] env[62510]: } is ready. 
{{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1627.734131] env[62510]: DEBUG oslo_vmware.rw_handles [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1627.734131] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52eff64c-6201-be37-42ce-b0bf88846800" [ 1627.734131] env[62510]: _type = "HttpNfcLease" [ 1627.734131] env[62510]: }. {{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1627.734827] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6108f6e3-fcf1-4348-a113-6727ccebeacc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.743257] env[62510]: DEBUG oslo_vmware.rw_handles [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52dc610c-ebfd-5364-d3ee-3fef4f922551/disk-0.vmdk from lease info. {{(pid=62510) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1627.743426] env[62510]: DEBUG oslo_vmware.rw_handles [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52dc610c-ebfd-5364-d3ee-3fef4f922551/disk-0.vmdk for reading. {{(pid=62510) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1627.874591] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dc36641a-cd6d-4db0-810b-e1128fe0d78f tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Lock "bd21dd81-c0d9-4ff1-9183-0b4622dc5afb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.141s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1627.874591] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6e48f05f-7737-429b-9819-dbbacc307385 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Lock "bd21dd81-c0d9-4ff1-9183-0b4622dc5afb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 28.905s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1627.874591] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6e48f05f-7737-429b-9819-dbbacc307385 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Acquiring lock "bd21dd81-c0d9-4ff1-9183-0b4622dc5afb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1627.874591] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6e48f05f-7737-429b-9819-dbbacc307385 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Lock "bd21dd81-c0d9-4ff1-9183-0b4622dc5afb-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1627.874591] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6e48f05f-7737-429b-9819-dbbacc307385 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Lock "bd21dd81-c0d9-4ff1-9183-0b4622dc5afb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1627.883799] env[62510]: INFO nova.compute.manager [None req-6e48f05f-7737-429b-9819-dbbacc307385 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Terminating instance [ 1627.907487] env[62510]: DEBUG oslo_concurrency.lockutils [req-58ff1e56-bc8c-48f5-a91f-82255190bc07 req-331eaacb-53ec-4d6b-ac87-c4d2cf0351ee service nova] Releasing lock "refresh_cache-f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1627.912400] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-6d399a84-45f0-4a9f-bba8-1d98b7a91d14 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.129064] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1628.129475] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fe2302aa-28a2-4a22-a46c-11ae5afa5b8b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.140963] env[62510]: DEBUG oslo_concurrency.lockutils [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Releasing lock "refresh_cache-3df19233-2448-4030-ae1d-a4f98ccffba9" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1628.141433] env[62510]: DEBUG nova.compute.manager [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Instance network_info: |[{"id": "6e3b34de-9819-4bbf-8565-8fd4f61417d3", "address": "fa:16:3e:bd:e1:31", "network": {"id": "259e31c4-74f6-4d58-9f76-c7b34d594473", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1218880601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f85ce3c02964d36a77221ba8235978c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, 
"type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e85cbc56-fee0-41f7-bc70-64f31775ce92", "external-id": "nsx-vlan-transportzone-793", "segmentation_id": 793, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e3b34de-98", "ovs_interfaceid": "6e3b34de-9819-4bbf-8565-8fd4f61417d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1628.141935] env[62510]: DEBUG oslo_vmware.api [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1628.141935] env[62510]: value = "task-1768808" [ 1628.141935] env[62510]: _type = "Task" [ 1628.141935] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.142812] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bd:e1:31', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e85cbc56-fee0-41f7-bc70-64f31775ce92', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6e3b34de-9819-4bbf-8565-8fd4f61417d3', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1628.152043] env[62510]: DEBUG oslo.service.loopingcall [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1628.158521] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1628.163833] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-168d98b8-11d5-4e86-be88-ed5ec6459368 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.194559] env[62510]: DEBUG oslo_vmware.api [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768808, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.196167] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1628.196167] env[62510]: value = "task-1768809" [ 1628.196167] env[62510]: _type = "Task" [ 1628.196167] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.209647] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768809, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.388227] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6e48f05f-7737-429b-9819-dbbacc307385 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Acquiring lock "refresh_cache-bd21dd81-c0d9-4ff1-9183-0b4622dc5afb" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1628.388393] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6e48f05f-7737-429b-9819-dbbacc307385 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Acquired lock "refresh_cache-bd21dd81-c0d9-4ff1-9183-0b4622dc5afb" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1628.389227] env[62510]: DEBUG nova.network.neutron [None req-6e48f05f-7737-429b-9819-dbbacc307385 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1628.449486] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85fdfb94-f463-4289-9981-b195d0679c40 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.459277] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-399bbf59-6af0-4f72-b66e-a70380058d26 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.494169] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df8f9c83-2bca-497a-a5e2-47013000089f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.504330] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e740d30-8140-48f6-9556-be0cea4acc2d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.522276] env[62510]: DEBUG nova.compute.provider_tree [None req-3b9a889c-1523-4f8a-954c-799bb67b2b97 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1628.574295] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36e197a5-de21-474f-8940-9418cce3a1aa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.595274] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Updating instance '350d5f83-d9ce-4997-bf57-70c4a4e22ba0' progress to 0 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1628.642012] env[62510]: DEBUG nova.compute.manager [req-e81bc818-f449-4ed1-becc-4cede89d94b1 
req-ee2c0ece-080a-4192-b9d7-6c504819aba3 service nova] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Received event network-changed-6e3b34de-9819-4bbf-8565-8fd4f61417d3 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1628.642012] env[62510]: DEBUG nova.compute.manager [req-e81bc818-f449-4ed1-becc-4cede89d94b1 req-ee2c0ece-080a-4192-b9d7-6c504819aba3 service nova] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Refreshing instance network info cache due to event network-changed-6e3b34de-9819-4bbf-8565-8fd4f61417d3. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1628.642012] env[62510]: DEBUG oslo_concurrency.lockutils [req-e81bc818-f449-4ed1-becc-4cede89d94b1 req-ee2c0ece-080a-4192-b9d7-6c504819aba3 service nova] Acquiring lock "refresh_cache-3df19233-2448-4030-ae1d-a4f98ccffba9" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1628.642012] env[62510]: DEBUG oslo_concurrency.lockutils [req-e81bc818-f449-4ed1-becc-4cede89d94b1 req-ee2c0ece-080a-4192-b9d7-6c504819aba3 service nova] Acquired lock "refresh_cache-3df19233-2448-4030-ae1d-a4f98ccffba9" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1628.642942] env[62510]: DEBUG nova.network.neutron [req-e81bc818-f449-4ed1-becc-4cede89d94b1 req-ee2c0ece-080a-4192-b9d7-6c504819aba3 service nova] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Refreshing network info cache for port 6e3b34de-9819-4bbf-8565-8fd4f61417d3 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1628.662525] env[62510]: DEBUG oslo_vmware.api [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768808, 'name': PowerOffVM_Task, 'duration_secs': 0.319593} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.663501] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1628.663501] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1628.664526] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f761c462-37ae-480f-9c23-7da7bb3d8cce {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.673467] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1628.673467] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a0be348c-3e4f-45e0-aa78-e8b78cff22f8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.707360] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768809, 'name': CreateVM_Task, 'duration_secs': 0.40816} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.707639] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1628.708470] env[62510]: DEBUG oslo_concurrency.lockutils [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1628.708732] env[62510]: DEBUG oslo_concurrency.lockutils [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1628.709278] env[62510]: DEBUG oslo_concurrency.lockutils [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1628.709586] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd4eaa2b-1058-47ec-b051-08f6608a32fe {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.715156] env[62510]: DEBUG oslo_vmware.api [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for the task: (returnval){ [ 1628.715156] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52707bb9-8a16-b3f9-79e7-a191af34881c" [ 1628.715156] env[62510]: _type = "Task" [ 1628.715156] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.724330] env[62510]: DEBUG oslo_vmware.api [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52707bb9-8a16-b3f9-79e7-a191af34881c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.803952] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1628.803952] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1628.803952] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Deleting the datastore file [datastore1] e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1628.803952] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dcbd1033-a283-4fd9-a695-a25b70be1a49 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.811104] env[62510]: DEBUG oslo_vmware.api [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1628.811104] env[62510]: value = "task-1768811" [ 1628.811104] env[62510]: _type = "Task" [ 1628.811104] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.824472] env[62510]: DEBUG oslo_vmware.api [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768811, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.892800] env[62510]: DEBUG nova.compute.utils [None req-6e48f05f-7737-429b-9819-dbbacc307385 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Can not refresh info_cache because instance was not found {{(pid=62510) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1024}} [ 1628.920093] env[62510]: DEBUG nova.network.neutron [None req-6e48f05f-7737-429b-9819-dbbacc307385 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Instance cache missing network info. 
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1628.944883] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "841460b0-d917-44ea-88c6-0e5a3022f658" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1628.945480] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "841460b0-d917-44ea-88c6-0e5a3022f658" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1629.024388] env[62510]: DEBUG nova.scheduler.client.report [None req-3b9a889c-1523-4f8a-954c-799bb67b2b97 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1629.057078] env[62510]: DEBUG oslo_concurrency.lockutils [None req-13a7af88-00cb-4252-bdea-5da40086a461 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Acquiring lock "b004fba7-13e0-40f0-827d-8d09b7717176" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1629.057353] env[62510]: DEBUG oslo_concurrency.lockutils [None req-13a7af88-00cb-4252-bdea-5da40086a461 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Lock "b004fba7-13e0-40f0-827d-8d09b7717176" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1629.057572] env[62510]: DEBUG oslo_concurrency.lockutils [None req-13a7af88-00cb-4252-bdea-5da40086a461 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Acquiring lock "b004fba7-13e0-40f0-827d-8d09b7717176-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1629.057761] env[62510]: DEBUG oslo_concurrency.lockutils [None req-13a7af88-00cb-4252-bdea-5da40086a461 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Lock "b004fba7-13e0-40f0-827d-8d09b7717176-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1629.057927] env[62510]: DEBUG oslo_concurrency.lockutils [None req-13a7af88-00cb-4252-bdea-5da40086a461 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Lock "b004fba7-13e0-40f0-827d-8d09b7717176-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1629.060743] env[62510]: INFO nova.compute.manager [None req-13a7af88-00cb-4252-bdea-5da40086a461 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Terminating instance [ 1629.095572] env[62510]: DEBUG nova.network.neutron [None req-6e48f05f-7737-429b-9819-dbbacc307385 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1629.101746] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1629.102279] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0b86eaa5-c267-4b5c-b8a6-42d774ed035a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.113541] env[62510]: DEBUG oslo_vmware.api [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1629.113541] env[62510]: value = "task-1768812" [ 1629.113541] env[62510]: _type = "Task" [ 1629.113541] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1629.125851] env[62510]: DEBUG oslo_vmware.api [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768812, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.229832] env[62510]: DEBUG oslo_vmware.api [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52707bb9-8a16-b3f9-79e7-a191af34881c, 'name': SearchDatastore_Task, 'duration_secs': 0.014726} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1629.230260] env[62510]: DEBUG oslo_concurrency.lockutils [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1629.230576] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1629.230858] env[62510]: DEBUG oslo_concurrency.lockutils [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1629.231038] env[62510]: DEBUG oslo_concurrency.lockutils [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1629.231249] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1629.231638] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-590db163-d7a0-4bb5-a822-f649fcbb0751 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.242733] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1629.242733] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1629.243184] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eae9d435-9323-4fe8-8da0-e0b40bb74e88 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.250463] env[62510]: DEBUG oslo_vmware.api [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for the task: (returnval){ [ 1629.250463] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52727ace-44da-6dea-962c-e1494d0d23aa" [ 1629.250463] env[62510]: _type = "Task" [ 1629.250463] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1629.261945] env[62510]: DEBUG oslo_vmware.api [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52727ace-44da-6dea-962c-e1494d0d23aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.326835] env[62510]: DEBUG oslo_vmware.api [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768811, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.30867} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1629.327150] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1629.327387] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1629.327638] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1629.450112] env[62510]: DEBUG nova.compute.manager [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1629.498204] env[62510]: DEBUG nova.network.neutron [req-e81bc818-f449-4ed1-becc-4cede89d94b1 req-ee2c0ece-080a-4192-b9d7-6c504819aba3 service nova] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Updated VIF entry in instance network info cache for port 6e3b34de-9819-4bbf-8565-8fd4f61417d3. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1629.498204] env[62510]: DEBUG nova.network.neutron [req-e81bc818-f449-4ed1-becc-4cede89d94b1 req-ee2c0ece-080a-4192-b9d7-6c504819aba3 service nova] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Updating instance_info_cache with network_info: [{"id": "6e3b34de-9819-4bbf-8565-8fd4f61417d3", "address": "fa:16:3e:bd:e1:31", "network": {"id": "259e31c4-74f6-4d58-9f76-c7b34d594473", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1218880601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f85ce3c02964d36a77221ba8235978c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e85cbc56-fee0-41f7-bc70-64f31775ce92", "external-id": "nsx-vlan-transportzone-793", "segmentation_id": 793, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e3b34de-98", "ovs_interfaceid": "6e3b34de-9819-4bbf-8565-8fd4f61417d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1629.530286] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3b9a889c-1523-4f8a-954c-799bb67b2b97 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.198s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1629.532615] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.838s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1629.534535] env[62510]: INFO nova.compute.claims [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1629.550479] env[62510]: INFO nova.scheduler.client.report [None req-3b9a889c-1523-4f8a-954c-799bb67b2b97 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Deleted allocations for instance b7c2c768-573b-4c1c-ade7-45fb87b95d41 [ 1629.564444] env[62510]: DEBUG nova.compute.manager [None req-13a7af88-00cb-4252-bdea-5da40086a461 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1629.564674] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-13a7af88-00cb-4252-bdea-5da40086a461 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1629.565607] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab859452-a518-4a96-9a82-05790245ced8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.575772] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-13a7af88-00cb-4252-bdea-5da40086a461 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1629.576762] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f837e79b-96ca-4f70-952c-647e40c94481 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.584813] env[62510]: DEBUG oslo_vmware.api [None req-13a7af88-00cb-4252-bdea-5da40086a461 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Waiting for the task: (returnval){ [ 1629.584813] env[62510]: value = "task-1768813" [ 1629.584813] env[62510]: _type = "Task" [ 1629.584813] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1629.595392] env[62510]: DEBUG oslo_vmware.api [None req-13a7af88-00cb-4252-bdea-5da40086a461 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': task-1768813, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.598100] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6e48f05f-7737-429b-9819-dbbacc307385 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Releasing lock "refresh_cache-bd21dd81-c0d9-4ff1-9183-0b4622dc5afb" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1629.598529] env[62510]: DEBUG nova.compute.manager [None req-6e48f05f-7737-429b-9819-dbbacc307385 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1629.598727] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-6e48f05f-7737-429b-9819-dbbacc307385 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1629.599812] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f33eeed3-4a0e-4aca-af62-8196cb651602 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.609740] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8c0c3a8-1008-466c-8dea-1c4f5d9459a2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.635548] env[62510]: DEBUG oslo_vmware.api [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768812, 'name': PowerOffVM_Task, 'duration_secs': 0.208445} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1629.635915] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1629.636118] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Updating instance '350d5f83-d9ce-4997-bf57-70c4a4e22ba0' progress to 17 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1629.654842] env[62510]: WARNING nova.virt.vmwareapi.vmops [None req-6e48f05f-7737-429b-9819-dbbacc307385 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bd21dd81-c0d9-4ff1-9183-0b4622dc5afb could not be found. [ 1629.655102] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-6e48f05f-7737-429b-9819-dbbacc307385 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1629.655341] env[62510]: INFO nova.compute.manager [None req-6e48f05f-7737-429b-9819-dbbacc307385 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Took 0.06 seconds to destroy the instance on the hypervisor. 
[ 1629.655658] env[62510]: DEBUG oslo.service.loopingcall [None req-6e48f05f-7737-429b-9819-dbbacc307385 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1629.656052] env[62510]: DEBUG nova.compute.manager [-] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1629.656180] env[62510]: DEBUG nova.network.neutron [-] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1629.685113] env[62510]: DEBUG nova.network.neutron [-] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1629.763213] env[62510]: DEBUG oslo_vmware.api [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52727ace-44da-6dea-962c-e1494d0d23aa, 'name': SearchDatastore_Task, 'duration_secs': 0.01766} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1629.764373] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52b234d3-215e-44e1-ac58-ba4c2344b2b0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.770420] env[62510]: DEBUG oslo_vmware.api [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for the task: (returnval){ [ 1629.770420] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52de5516-5acd-24d2-de7c-472335f2fb8e" [ 1629.770420] env[62510]: _type = "Task" [ 1629.770420] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1629.778806] env[62510]: DEBUG oslo_vmware.api [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52de5516-5acd-24d2-de7c-472335f2fb8e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.973735] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1630.001448] env[62510]: DEBUG oslo_concurrency.lockutils [req-e81bc818-f449-4ed1-becc-4cede89d94b1 req-ee2c0ece-080a-4192-b9d7-6c504819aba3 service nova] Releasing lock "refresh_cache-3df19233-2448-4030-ae1d-a4f98ccffba9" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1630.058493] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3b9a889c-1523-4f8a-954c-799bb67b2b97 tempest-ListImageFiltersTestJSON-178793928 tempest-ListImageFiltersTestJSON-178793928-project-member] Lock "b7c2c768-573b-4c1c-ade7-45fb87b95d41" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.898s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1630.095394] env[62510]: DEBUG oslo_vmware.api [None req-13a7af88-00cb-4252-bdea-5da40086a461 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': task-1768813, 'name': PowerOffVM_Task, 'duration_secs': 0.416155} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1630.095669] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-13a7af88-00cb-4252-bdea-5da40086a461 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1630.095873] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-13a7af88-00cb-4252-bdea-5da40086a461 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1630.096147] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-292613bf-6025-4318-905d-dd7b6fb82f80 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.142320] env[62510]: DEBUG nova.virt.hardware [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:41Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1630.142572] env[62510]: DEBUG nova.virt.hardware [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1630.142727] env[62510]: DEBUG nova.virt.hardware [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1630.142910] env[62510]: DEBUG nova.virt.hardware [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1630.143075] env[62510]: DEBUG nova.virt.hardware [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1630.143227] env[62510]: DEBUG nova.virt.hardware [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1630.143430] env[62510]: DEBUG nova.virt.hardware [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1630.143651] env[62510]: DEBUG nova.virt.hardware [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1630.143864] env[62510]: DEBUG nova.virt.hardware [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1630.144046] env[62510]: DEBUG nova.virt.hardware [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1630.144257] env[62510]: DEBUG nova.virt.hardware [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 
tempest-ServerDiskConfigTestJSON-921990528-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1630.149512] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9caee1b6-eb54-4bd3-a5e3-c69b2d01d450 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.167186] env[62510]: DEBUG oslo_vmware.api [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1630.167186] env[62510]: value = "task-1768815" [ 1630.167186] env[62510]: _type = "Task" [ 1630.167186] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1630.175663] env[62510]: DEBUG oslo_vmware.api [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768815, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.191331] env[62510]: DEBUG nova.network.neutron [-] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1630.282577] env[62510]: DEBUG oslo_vmware.api [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52de5516-5acd-24d2-de7c-472335f2fb8e, 'name': SearchDatastore_Task, 'duration_secs': 0.017915} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1630.282900] env[62510]: DEBUG oslo_concurrency.lockutils [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1630.283246] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 3df19233-2448-4030-ae1d-a4f98ccffba9/3df19233-2448-4030-ae1d-a4f98ccffba9.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1630.283536] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bf18f714-a4f3-43e4-880e-235fa4b9ad14 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.292687] env[62510]: DEBUG oslo_vmware.api [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for the task: (returnval){ [ 1630.292687] env[62510]: value = "task-1768816" [ 1630.292687] env[62510]: _type = "Task" [ 1630.292687] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1630.302651] env[62510]: DEBUG oslo_vmware.api [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768816, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.362998] env[62510]: DEBUG nova.virt.hardware [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1630.363153] env[62510]: DEBUG nova.virt.hardware [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1630.363253] env[62510]: DEBUG nova.virt.hardware [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1630.363447] env[62510]: DEBUG nova.virt.hardware [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1630.363650] env[62510]: DEBUG nova.virt.hardware [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1630.363815] env[62510]: DEBUG nova.virt.hardware [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1630.364043] env[62510]: DEBUG nova.virt.hardware [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1630.364210] env[62510]: DEBUG nova.virt.hardware [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1630.364389] env[62510]: DEBUG nova.virt.hardware [None 
req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1630.364546] env[62510]: DEBUG nova.virt.hardware [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1630.364977] env[62510]: DEBUG nova.virt.hardware [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1630.365654] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c2e87be-8360-43f6-99bc-14254ef44945 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.376293] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69586228-0c67-47ff-962d-b37f6069bf49 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.392291] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7c:5b:6c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c1b8b991-feba-44e6-900c-6486e7e122f0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '36a0ae52-841d-4fba-ab7b-a2c6ca6c6f25', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1630.400620] env[62510]: DEBUG oslo.service.loopingcall [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1630.400969] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1630.401288] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-54b0106a-2895-4b62-8845-4ba55966af21 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.421527] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1630.421527] env[62510]: value = "task-1768817" [ 1630.421527] env[62510]: _type = "Task" [ 1630.421527] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1630.429977] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768817, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.677691] env[62510]: DEBUG oslo_vmware.api [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768815, 'name': ReconfigVM_Task, 'duration_secs': 0.273304} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1630.680613] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Updating instance '350d5f83-d9ce-4997-bf57-70c4a4e22ba0' progress to 33 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1630.696390] env[62510]: INFO nova.compute.manager [-] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Took 1.04 seconds to deallocate network for instance. [ 1630.808443] env[62510]: DEBUG oslo_vmware.api [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768816, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.938383] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768817, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.032114] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3fa3721-e3ad-472a-a17c-2e8cd0baf3d6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.040203] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37718584-3e0a-491e-8dcc-5a1cde31cb54 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.072597] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b813953-878d-4d8e-ab06-f09aa8fb7975 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.081758] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f76bc3b-0195-4bc1-a4a4-c17190f7d04c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.096666] env[62510]: DEBUG nova.compute.provider_tree [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1631.187631] env[62510]: DEBUG nova.virt.hardware [None 
req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1631.187880] env[62510]: DEBUG nova.virt.hardware [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1631.188115] env[62510]: DEBUG nova.virt.hardware [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1631.188394] env[62510]: DEBUG nova.virt.hardware [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1631.188587] env[62510]: DEBUG nova.virt.hardware [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1631.188803] env[62510]: DEBUG nova.virt.hardware [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1631.189042] env[62510]: DEBUG nova.virt.hardware [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1631.189209] env[62510]: DEBUG nova.virt.hardware [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1631.189378] env[62510]: DEBUG nova.virt.hardware [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1631.189541] env[62510]: 
DEBUG nova.virt.hardware [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1631.189712] env[62510]: DEBUG nova.virt.hardware [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1631.195184] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Reconfiguring VM instance instance-00000032 to detach disk 2000 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1631.195489] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-374bdfba-6bc9-427a-a772-51cfa89da832 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.209274] env[62510]: INFO nova.compute.manager [None req-6e48f05f-7737-429b-9819-dbbacc307385 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Instance disappeared during terminate [ 1631.209495] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6e48f05f-7737-429b-9819-dbbacc307385 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Lock "bd21dd81-c0d9-4ff1-9183-0b4622dc5afb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.336s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1631.218272] env[62510]: DEBUG oslo_vmware.api [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1631.218272] env[62510]: value = "task-1768818" [ 1631.218272] env[62510]: _type = "Task" [ 1631.218272] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.230303] env[62510]: DEBUG oslo_vmware.api [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768818, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.306446] env[62510]: DEBUG oslo_vmware.api [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768816, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.676979} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.307048] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 3df19233-2448-4030-ae1d-a4f98ccffba9/3df19233-2448-4030-ae1d-a4f98ccffba9.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1631.307256] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1631.307585] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f1458199-8dba-4ac0-8711-05752422f7ef {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.317239] env[62510]: DEBUG oslo_vmware.api [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for the task: (returnval){ [ 1631.317239] env[62510]: value = "task-1768819" [ 1631.317239] env[62510]: _type = "Task" [ 1631.317239] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.329024] env[62510]: DEBUG oslo_vmware.api [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768819, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.432699] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768817, 'name': CreateVM_Task} progress is 25%. 
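Several of the records above follow the same pattern: invoke a vCenter task (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, CreateVM_Task), then poll it until it reports success, logging the progress percentage along the way. A generic sketch of that poll-until-done loop; get_task_info is a hypothetical callable standing in for the real task query, not the oslo.vmware API:

```python
import time

def wait_for_task(get_task_info, task_id, poll_interval=0.5, timeout=300):
    """Poll a task until it reaches a terminal state, mirroring the
    'progress is N% ... completed successfully' records above.

    get_task_info(task_id) is assumed to return a dict such as
    {'state': 'running', 'progress': 25} or {'state': 'success'}.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_id)
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise RuntimeError(f"task {task_id} failed: {info.get('error')}")
        print(f"Task {task_id} progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)
    raise TimeoutError(f"task {task_id} did not complete within {timeout}s")
```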
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.471662] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-13a7af88-00cb-4252-bdea-5da40086a461 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1631.471959] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-13a7af88-00cb-4252-bdea-5da40086a461 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1631.472179] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-13a7af88-00cb-4252-bdea-5da40086a461 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Deleting the datastore file [datastore1] b004fba7-13e0-40f0-827d-8d09b7717176 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1631.472536] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6dbffbc5-1f76-4a19-ab7e-a343747234b3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.479572] env[62510]: DEBUG oslo_vmware.api [None req-13a7af88-00cb-4252-bdea-5da40086a461 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Waiting for the task: (returnval){ [ 1631.479572] env[62510]: value = "task-1768820" [ 1631.479572] env[62510]: _type = "Task" [ 1631.479572] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.488240] env[62510]: DEBUG oslo_vmware.api [None req-13a7af88-00cb-4252-bdea-5da40086a461 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': task-1768820, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.622530] env[62510]: ERROR nova.scheduler.client.report [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [req-024ae538-79c1-4ece-ae8f-f0763a9091cc] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c3653102-341b-4ed1-8b1f-1abaf8aa3e56. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-024ae538-79c1-4ece-ae8f-f0763a9091cc"}]} [ 1631.641446] env[62510]: DEBUG nova.scheduler.client.report [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Refreshing inventories for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1631.658535] env[62510]: DEBUG nova.scheduler.client.report [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Updating ProviderTree inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1631.658535] env[62510]: DEBUG nova.compute.provider_tree [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1631.670865] env[62510]: DEBUG nova.scheduler.client.report [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Refreshing aggregate associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, aggregates: None {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1631.690831] env[62510]: DEBUG nova.scheduler.client.report [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Refreshing trait associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1631.731195] env[62510]: DEBUG oslo_vmware.api [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768818, 'name': ReconfigVM_Task, 'duration_secs': 0.19852} completed successfully. 
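The ERROR record above is a routine placement conflict: the inventory PUT carried a stale resource provider generation, placement answered 409 placement.concurrent_update, and the report client reacts by refreshing inventories, aggregates and traits before trying again. A sketch of that refresh-and-retry shape; client, get_inventory and put_inventory are hypothetical stand-ins, not the real report client API:

```python
def set_inventory_with_retry(client, provider_uuid, inventory, max_attempts=4):
    """Re-read the provider generation and retry on 409 concurrent_update."""
    for _ in range(max_attempts):
        current = client.get_inventory(provider_uuid)   # fresh generation + inventories
        status, body = client.put_inventory(
            provider_uuid,
            inventory,
            generation=current["resource_provider_generation"],
        )
        if status == 200:
            return body
        if status != 409:
            raise RuntimeError(f"unexpected status {status}: {body}")
        # 409: another writer bumped the generation between our GET and PUT;
        # loop around, refresh the provider view and try again.
    raise RuntimeError("gave up after repeated generation conflicts")
```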
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.735127] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Reconfigured VM instance instance-00000032 to detach disk 2000 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1631.736189] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d13303e-12d3-4f60-bd69-fbffdaaad1e0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.766373] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] 350d5f83-d9ce-4997-bf57-70c4a4e22ba0/350d5f83-d9ce-4997-bf57-70c4a4e22ba0.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1631.769557] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ab0d9391-e862-440b-8901-6efef700700d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.797146] env[62510]: DEBUG oslo_vmware.api [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1631.797146] env[62510]: value = "task-1768821" [ 1631.797146] env[62510]: _type = "Task" [ 1631.797146] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.810805] env[62510]: DEBUG oslo_vmware.api [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768821, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.829255] env[62510]: DEBUG oslo_vmware.api [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768819, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08174} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.829571] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1631.830892] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-184c40f8-e460-44f5-bf3d-5f7fff4641c6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.859216] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] 3df19233-2448-4030-ae1d-a4f98ccffba9/3df19233-2448-4030-ae1d-a4f98ccffba9.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1631.861364] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8c1dc325-4479-4739-bc13-9c9eb8fb680a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.883493] env[62510]: DEBUG oslo_vmware.api [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for the task: (returnval){ [ 1631.883493] env[62510]: value = "task-1768822" [ 1631.883493] env[62510]: _type = "Task" [ 1631.883493] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.894380] env[62510]: DEBUG oslo_vmware.api [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768822, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.937396] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768817, 'name': CreateVM_Task, 'duration_secs': 1.392356} completed successfully. 
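The 'Extending root virtual disk to 1048576' step above is the flavor's root_gb converted to the KB unit the extend call works in: root_gb=1 gives 1 * 1024 * 1024 = 1048576 KB. A small worked sketch of that conversion, with the grow-only guard one would expect (an assumption, not a quote of the driver):

```python
def root_disk_target_kb(root_gb):
    """Flavor root_gb expressed in KB: 1 GiB -> 1048576 KB."""
    return root_gb * 1024 * 1024

def needs_extend(current_kb, root_gb):
    """Only ever grow the root disk; shrinking is not attempted here."""
    return current_kb < root_disk_target_kb(root_gb)

assert root_disk_target_kb(1) == 1048576
assert needs_extend(current_kb=20819, root_gb=1)   # ~20 MiB sparse image vs 1 GiB flavor root disk
```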
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.937536] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1631.938384] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1631.938984] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1631.938984] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1631.942209] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47651a7e-dd38-4ce4-a264-da8d1d2de413 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.949239] env[62510]: DEBUG oslo_vmware.api [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1631.949239] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52d6b973-af19-d76a-c627-ff2162ac6ce0" [ 1631.949239] env[62510]: _type = "Task" [ 1631.949239] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.961338] env[62510]: DEBUG oslo_vmware.api [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52d6b973-af19-d76a-c627-ff2162ac6ce0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.990176] env[62510]: DEBUG oslo_vmware.api [None req-13a7af88-00cb-4252-bdea-5da40086a461 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Task: {'id': task-1768820, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.318964} completed successfully. 
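The Acquiring/Acquired lock records above serialize access to the devstack-image-cache_base entry for image 645af513-c243-4722-b631-714f21477ae6 while the driver checks whether the cached VMDK already exists. A minimal sketch of the same pattern with oslo.concurrency's lock helper; search_datastore is a hypothetical callable standing in for the SearchDatastore_Task round trip:

```python
from oslo_concurrency import lockutils

CACHE_PATH = "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6"

def check_cached_image(search_datastore):
    # One caller at a time per cache entry; the lock name is simply the
    # datastore path string seen in the log. Passing external=True to
    # lockutils.lock would make it an inter-process (file based) lock.
    with lockutils.lock(CACHE_PATH):
        return search_datastore(CACHE_PATH)
```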
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.993410] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-13a7af88-00cb-4252-bdea-5da40086a461 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1631.993678] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-13a7af88-00cb-4252-bdea-5da40086a461 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1631.993935] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-13a7af88-00cb-4252-bdea-5da40086a461 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1631.994162] env[62510]: INFO nova.compute.manager [None req-13a7af88-00cb-4252-bdea-5da40086a461 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Took 2.43 seconds to destroy the instance on the hypervisor. [ 1631.994457] env[62510]: DEBUG oslo.service.loopingcall [None req-13a7af88-00cb-4252-bdea-5da40086a461 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1631.995054] env[62510]: DEBUG nova.compute.manager [-] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1631.995174] env[62510]: DEBUG nova.network.neutron [-] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1632.310449] env[62510]: DEBUG oslo_vmware.api [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768821, 'name': ReconfigVM_Task, 'duration_secs': 0.330339} completed successfully. 
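After the datastore contents are deleted, the manager hands network cleanup to a looping call ('Waiting for function ... _deallocate_network_with_retries to return'). A generic sketch of that retry wrapper; deallocate is a hypothetical callable, and the real code uses oslo.service's looping-call machinery rather than a bare loop:

```python
import time

def deallocate_with_retries(deallocate, attempts=3, delay=1.0):
    """Retry a transiently failing network deallocation a few times."""
    last_exc = None
    for attempt in range(1, attempts + 1):
        try:
            return deallocate()
        except Exception as exc:          # the real code narrows this to retriable errors
            last_exc = exc
            time.sleep(delay * attempt)   # simple linear back-off between attempts
    raise last_exc
```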
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1632.311622] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Reconfigured VM instance instance-00000032 to attach disk [datastore1] 350d5f83-d9ce-4997-bf57-70c4a4e22ba0/350d5f83-d9ce-4997-bf57-70c4a4e22ba0.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1632.311911] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Updating instance '350d5f83-d9ce-4997-bf57-70c4a4e22ba0' progress to 50 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1632.318935] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3595824c-dcde-4e25-8116-c5314421fdf1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.324124] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-472a1f28-2540-45e7-a998-1c41408f3e0f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.372686] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bdc0fe2-07b3-4edd-b5c6-13383c0d94ef {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.394173] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f366ed4-2bdc-4af1-9239-5d75a858bce4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.407213] env[62510]: DEBUG oslo_vmware.api [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768822, 'name': ReconfigVM_Task, 'duration_secs': 0.33338} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1632.415781] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Reconfigured VM instance instance-0000003c to attach disk [datastore1] 3df19233-2448-4030-ae1d-a4f98ccffba9/3df19233-2448-4030-ae1d-a4f98ccffba9.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1632.417177] env[62510]: DEBUG nova.compute.provider_tree [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1632.418539] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f9d49f7f-45c4-4844-ab4d-094de22618b3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.428564] env[62510]: DEBUG oslo_vmware.api [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for the task: (returnval){ [ 1632.428564] env[62510]: value = "task-1768823" [ 1632.428564] env[62510]: _type = "Task" [ 1632.428564] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.439506] env[62510]: DEBUG oslo_vmware.api [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768823, 'name': Rename_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.447606] env[62510]: DEBUG nova.compute.manager [req-db4fd59d-63fe-4eeb-b122-9f28502b33e5 req-79811672-70c9-4437-8602-f74e80d2d25c service nova] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Received event network-vif-deleted-b93d3484-b909-4060-aef6-1f45f91f2325 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1632.447812] env[62510]: INFO nova.compute.manager [req-db4fd59d-63fe-4eeb-b122-9f28502b33e5 req-79811672-70c9-4437-8602-f74e80d2d25c service nova] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Neutron deleted interface b93d3484-b909-4060-aef6-1f45f91f2325; detaching it from the instance and deleting it from the info cache [ 1632.447987] env[62510]: DEBUG nova.network.neutron [req-db4fd59d-63fe-4eeb-b122-9f28502b33e5 req-79811672-70c9-4437-8602-f74e80d2d25c service nova] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1632.462032] env[62510]: DEBUG oslo_vmware.api [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52d6b973-af19-d76a-c627-ff2162ac6ce0, 'name': SearchDatastore_Task, 'duration_secs': 0.015248} completed successfully. 
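The req-db4fd59d records above show the external-event path: Neutron reports network-vif-deleted for port b93d3484-b909-4060-aef6-1f45f91f2325, and the compute manager drops that port from the instance's cached network info, ending up with an empty list. A sketch of that bookkeeping which tolerates the instance already being gone (see the later 'Detach interface failed ... could not be found' record); the cache layout used here is an assumption:

```python
def handle_vif_deleted(instance_cache, instance_uuid, port_id):
    """Remove a deleted port from an instance's cached network info.

    instance_cache is assumed to map instance UUID -> list of port dicts.
    """
    vifs = instance_cache.get(instance_uuid)
    if vifs is None:
        print(f"Instance {instance_uuid} could not be found; nothing to detach")
        return []
    remaining = [vif for vif in vifs if vif.get("id") != port_id]
    instance_cache[instance_uuid] = remaining   # becomes [] once the last port is gone
    return remaining
```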
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1632.462967] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1632.463338] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1632.463720] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1632.463963] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1632.464369] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1632.465093] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b7d654ab-74ba-4d5b-8df9-9cef98a56c5e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.478692] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1632.479672] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Folder [datastore1] devstack-image-cache_base created. 
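Before copying the image, the driver makes sure the cache folder exists ('Creating directory with path [datastore1] devstack-image-cache_base' followed by 'Folder ... created'), issued as FileManager.MakeDirectory against the datastore. A local-filesystem analogue of the same idempotent step, for illustration only:

```python
import os

def ensure_cache_dir(path):
    """Create the cache folder if it is missing; a no-op if it already exists."""
    os.makedirs(path, exist_ok=True)
    return path
```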
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1632.479827] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c56ffe5a-920a-4f64-9aec-f8a8d29991e1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.486669] env[62510]: DEBUG oslo_vmware.api [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1632.486669] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]529478ea-6d6d-2b76-ac20-0bcbe51e897d" [ 1632.486669] env[62510]: _type = "Task" [ 1632.486669] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.498224] env[62510]: DEBUG oslo_vmware.api [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]529478ea-6d6d-2b76-ac20-0bcbe51e897d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.821641] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2bc631f-96c1-4362-b5bb-a0a28f63483e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.842462] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-080993a7-8780-4481-a24b-8f5561dbd18a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.862995] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Updating instance '350d5f83-d9ce-4997-bf57-70c4a4e22ba0' progress to 67 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1632.889036] env[62510]: DEBUG nova.network.neutron [-] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1632.921897] env[62510]: DEBUG nova.scheduler.client.report [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1632.940539] env[62510]: DEBUG oslo_vmware.api [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768823, 'name': Rename_Task, 
'duration_secs': 0.165756} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1632.941528] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1632.941823] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4499bea8-683c-4c53-b728-4b66b4e548aa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.950589] env[62510]: DEBUG oslo_vmware.api [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for the task: (returnval){ [ 1632.950589] env[62510]: value = "task-1768824" [ 1632.950589] env[62510]: _type = "Task" [ 1632.950589] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.950819] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e24d233c-0667-432d-8e27-8d2f7a647165 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.963388] env[62510]: DEBUG oslo_vmware.api [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768824, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.967224] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dcbccde-d88a-4da6-91f1-86b8b112f012 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.007828] env[62510]: DEBUG nova.compute.manager [req-db4fd59d-63fe-4eeb-b122-9f28502b33e5 req-79811672-70c9-4437-8602-f74e80d2d25c service nova] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Detach interface failed, port_id=b93d3484-b909-4060-aef6-1f45f91f2325, reason: Instance b004fba7-13e0-40f0-827d-8d09b7717176 could not be found. {{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1633.016217] env[62510]: DEBUG oslo_vmware.api [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]529478ea-6d6d-2b76-ac20-0bcbe51e897d, 'name': SearchDatastore_Task, 'duration_secs': 0.033325} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1633.017116] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2cff94b1-cbbd-4258-ae61-0dc9f3ab325b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.024028] env[62510]: DEBUG oslo_vmware.api [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1633.024028] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52db88c9-f7ee-7a88-fb24-7536efa47696" [ 1633.024028] env[62510]: _type = "Task" [ 1633.024028] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1633.033678] env[62510]: DEBUG oslo_vmware.api [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52db88c9-f7ee-7a88-fb24-7536efa47696, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.390875] env[62510]: INFO nova.compute.manager [-] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Took 1.40 seconds to deallocate network for instance. [ 1633.420156] env[62510]: DEBUG nova.network.neutron [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Port 8597503b-d757-44ff-91a7-6f52b3b75aa3 binding to destination host cpu-1 is already ACTIVE {{(pid=62510) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1633.430027] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.895s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1633.430027] env[62510]: DEBUG nova.compute.manager [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1633.433017] env[62510]: DEBUG oslo_concurrency.lockutils [None req-daaf4048-a186-46d5-8731-1089c883ed22 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.186s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1633.433469] env[62510]: DEBUG nova.objects.instance [None req-daaf4048-a186-46d5-8731-1089c883ed22 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Lazy-loading 'resources' on Instance uuid 612e95d6-28ef-4c9a-b5d9-fd83122bfa44 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1633.467509] env[62510]: DEBUG oslo_vmware.api [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768824, 'name': PowerOnVM_Task, 'duration_secs': 0.502913} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1633.467669] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1633.467893] env[62510]: INFO nova.compute.manager [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Took 9.08 seconds to spawn the instance on the hypervisor. [ 1633.468085] env[62510]: DEBUG nova.compute.manager [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1633.468886] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dc5f0e6-f4aa-4b0f-b201-1c0393b15c32 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.537080] env[62510]: DEBUG oslo_vmware.api [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52db88c9-f7ee-7a88-fb24-7536efa47696, 'name': SearchDatastore_Task, 'duration_secs': 0.024257} completed successfully. 
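Once PowerOnVM_Task finishes, the manager re-reads the guest's power state ('Checking state' above) before reporting the instance as spawned. A hedged sketch of the usual mapping from the vSphere runtime.powerState string to a Nova-style label; the exact mapping inside the vmwareapi driver may differ:

```python
# Assumed mapping for illustration; not a quote of the driver's table.
POWER_STATES = {
    "poweredOn": "RUNNING",
    "poweredOff": "SHUTDOWN",
    "suspended": "SUSPENDED",
}

def get_power_state(runtime_power_state):
    """Translate a vSphere runtime power state into a coarse instance state."""
    return POWER_STATES.get(runtime_power_state, "NOSTATE")

print(get_power_state("poweredOn"))   # RUNNING
```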
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1633.537371] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1633.537636] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7/e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1633.538025] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b31bc98f-c1f9-458a-afe9-8abdfd86958a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.547277] env[62510]: DEBUG oslo_vmware.api [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1633.547277] env[62510]: value = "task-1768825" [ 1633.547277] env[62510]: _type = "Task" [ 1633.547277] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1633.557015] env[62510]: DEBUG oslo_vmware.api [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768825, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.898348] env[62510]: DEBUG oslo_concurrency.lockutils [None req-13a7af88-00cb-4252-bdea-5da40086a461 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1633.937762] env[62510]: DEBUG nova.compute.utils [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1633.942510] env[62510]: DEBUG nova.compute.manager [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Not allocating networking since 'none' was specified. 
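The CopyVirtualDisk_Task above copies the cached image VMDK into a folder named after the new instance, i.e. '[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk' to '[datastore1] <instance-uuid>/<instance-uuid>.vmdk'. A small sketch of how those two datastore paths are composed; only the path shapes come from the log, the helper names are mine:

```python
def cached_image_path(datastore, image_id):
    return f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"

def instance_disk_path(datastore, instance_uuid):
    return f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"

src = cached_image_path("datastore1", "645af513-c243-4722-b631-714f21477ae6")
dst = instance_disk_path("datastore1", "e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7")
print(f"Copying Virtual Disk {src} to {dst}")
```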
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1633.988047] env[62510]: INFO nova.compute.manager [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Took 43.02 seconds to build instance. [ 1634.059088] env[62510]: DEBUG oslo_vmware.api [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768825, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.447864] env[62510]: DEBUG nova.compute.manager [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1634.457968] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "350d5f83-d9ce-4997-bf57-70c4a4e22ba0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1634.458453] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "350d5f83-d9ce-4997-bf57-70c4a4e22ba0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1634.458453] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "350d5f83-d9ce-4997-bf57-70c4a4e22ba0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1634.490453] env[62510]: DEBUG oslo_concurrency.lockutils [None req-01c108a3-bab3-4c67-8c45-7dd13aed0308 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Lock "3df19233-2448-4030-ae1d-a4f98ccffba9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.543s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1634.509715] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6800166f-3e2b-4e3b-bec4-f7bb3f1d125e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.518736] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a33e405-dd41-4763-9131-25f1f1c336fe {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1634.556748] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c42da687-5a7f-44c4-bd3d-869144a1d496 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.570214] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c9b5aca-11ca-44bd-864d-7d7258189db5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.573021] env[62510]: DEBUG oslo_vmware.api [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768825, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.610104} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1634.573152] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7/e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1634.573417] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1634.574032] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-58074429-bb78-47b7-94b8-85d386fc1a52 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.584337] env[62510]: DEBUG nova.compute.provider_tree [None req-daaf4048-a186-46d5-8731-1089c883ed22 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1634.587767] env[62510]: DEBUG oslo_vmware.api [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1634.587767] env[62510]: value = "task-1768826" [ 1634.587767] env[62510]: _type = "Task" [ 1634.587767] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.598796] env[62510]: DEBUG oslo_vmware.api [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768826, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.869781] env[62510]: DEBUG oslo_concurrency.lockutils [None req-99984399-3df5-42b4-9b86-1a4b7879324f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Acquiring lock "3df19233-2448-4030-ae1d-a4f98ccffba9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1634.869781] env[62510]: DEBUG oslo_concurrency.lockutils [None req-99984399-3df5-42b4-9b86-1a4b7879324f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Lock "3df19233-2448-4030-ae1d-a4f98ccffba9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1634.869781] env[62510]: DEBUG oslo_concurrency.lockutils [None req-99984399-3df5-42b4-9b86-1a4b7879324f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Acquiring lock "3df19233-2448-4030-ae1d-a4f98ccffba9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1634.869781] env[62510]: DEBUG oslo_concurrency.lockutils [None req-99984399-3df5-42b4-9b86-1a4b7879324f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Lock "3df19233-2448-4030-ae1d-a4f98ccffba9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1634.869781] env[62510]: DEBUG oslo_concurrency.lockutils [None req-99984399-3df5-42b4-9b86-1a4b7879324f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Lock "3df19233-2448-4030-ae1d-a4f98ccffba9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1634.870443] env[62510]: INFO nova.compute.manager [None req-99984399-3df5-42b4-9b86-1a4b7879324f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Terminating instance [ 1635.093744] env[62510]: DEBUG nova.scheduler.client.report [None req-daaf4048-a186-46d5-8731-1089c883ed22 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1635.103404] env[62510]: DEBUG oslo_vmware.api [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 
tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768826, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068307} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1635.103733] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1635.104599] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30366015-743a-43dd-a005-c998f2a5f185 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.135099] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Reconfiguring VM instance instance-00000015 to attach disk [datastore1] e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7/e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1635.136934] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dcea3a42-e941-42a2-8a2a-40f3263032d8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.163030] env[62510]: DEBUG oslo_vmware.api [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1635.163030] env[62510]: value = "task-1768827" [ 1635.163030] env[62510]: _type = "Task" [ 1635.163030] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1635.175256] env[62510]: DEBUG oslo_vmware.api [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768827, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.375043] env[62510]: DEBUG nova.compute.manager [None req-99984399-3df5-42b4-9b86-1a4b7879324f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Start destroying the instance on the hypervisor. 
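The do_terminate_instance records a few lines above take two locks before touching the guest: one on the instance UUID for the whole teardown, and a short-lived one on '<uuid>-events' while pending external events are cleared, after which the hypervisor-side destroy begins ('Start destroying the instance on the hypervisor'). A minimal sketch of that nesting with oslo.concurrency; clear_events and destroy_on_hypervisor are hypothetical callables, not the manager's real methods:

```python
from oslo_concurrency import lockutils

def do_terminate_instance(instance_uuid, clear_events, destroy_on_hypervisor):
    with lockutils.lock(instance_uuid):
        # Briefly hold the per-instance events lock while discarding any
        # pending external events for this instance.
        with lockutils.lock(f"{instance_uuid}-events"):
            clear_events(instance_uuid)
        # Then proceed with the actual teardown under the instance lock.
        destroy_on_hypervisor(instance_uuid)
```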
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1635.375043] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-99984399-3df5-42b4-9b86-1a4b7879324f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1635.375716] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebe3bbe1-04ed-48ed-80a8-8106d0a26844 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.385040] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-99984399-3df5-42b4-9b86-1a4b7879324f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1635.385339] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-06620e9f-373b-4724-8db7-a9331c0f7cca {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.394443] env[62510]: DEBUG oslo_vmware.api [None req-99984399-3df5-42b4-9b86-1a4b7879324f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for the task: (returnval){ [ 1635.394443] env[62510]: value = "task-1768828" [ 1635.394443] env[62510]: _type = "Task" [ 1635.394443] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1635.404521] env[62510]: DEBUG oslo_vmware.api [None req-99984399-3df5-42b4-9b86-1a4b7879324f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768828, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.473056] env[62510]: DEBUG nova.compute.manager [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1635.501734] env[62510]: DEBUG nova.virt.hardware [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1635.501987] env[62510]: DEBUG nova.virt.hardware [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1635.502168] env[62510]: DEBUG nova.virt.hardware [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1635.502357] env[62510]: DEBUG nova.virt.hardware [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1635.502496] env[62510]: DEBUG nova.virt.hardware [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1635.502644] env[62510]: DEBUG nova.virt.hardware [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1635.502851] env[62510]: DEBUG nova.virt.hardware [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1635.503042] env[62510]: DEBUG nova.virt.hardware [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1635.503188] env[62510]: DEBUG nova.virt.hardware [None req-3166bdb0-f081-49e0-821a-5b82305432b8 
tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1635.503356] env[62510]: DEBUG nova.virt.hardware [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1635.503524] env[62510]: DEBUG nova.virt.hardware [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1635.504484] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-365375ae-2fbf-4890-8015-40d04295279f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.510193] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "refresh_cache-350d5f83-d9ce-4997-bf57-70c4a4e22ba0" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1635.510417] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquired lock "refresh_cache-350d5f83-d9ce-4997-bf57-70c4a4e22ba0" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1635.510610] env[62510]: DEBUG nova.network.neutron [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1635.516529] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6a2abb3-baf9-4b2e-b0e2-02c99add99c7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.535187] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Instance VIF info [] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1635.541480] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Creating folder: Project (e93350d34a964bf59ac765c4040363ab). Parent ref: group-v367197. 
{{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1635.542287] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7a206df9-732e-49b7-9e82-b7c844aca34b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.556987] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Created folder: Project (e93350d34a964bf59ac765c4040363ab) in parent group-v367197. [ 1635.557227] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Creating folder: Instances. Parent ref: group-v367355. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1635.557469] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8bb9c368-cfc8-45a7-81a1-915cd4ec76ea {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.576928] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Created folder: Instances in parent group-v367355. [ 1635.577242] env[62510]: DEBUG oslo.service.loopingcall [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1635.577466] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1635.577680] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fcbce3e3-0539-49fd-9ac7-35943d69d410 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.598915] env[62510]: DEBUG oslo_concurrency.lockutils [None req-daaf4048-a186-46d5-8731-1089c883ed22 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.166s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1635.603216] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0da0e7d6-938e-48c2-98c4-43f87173c67c tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.220s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1635.603582] env[62510]: DEBUG nova.objects.instance [None req-0da0e7d6-938e-48c2-98c4-43f87173c67c tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Lazy-loading 'resources' on Instance uuid 9d5d29ea-be92-4881-9fc8-fea3f2f442d0 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1635.605502] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: 
(returnval){ [ 1635.605502] env[62510]: value = "task-1768831" [ 1635.605502] env[62510]: _type = "Task" [ 1635.605502] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1635.616722] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768831, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.628623] env[62510]: INFO nova.scheduler.client.report [None req-daaf4048-a186-46d5-8731-1089c883ed22 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Deleted allocations for instance 612e95d6-28ef-4c9a-b5d9-fd83122bfa44 [ 1635.672627] env[62510]: DEBUG oslo_vmware.api [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768827, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.907738] env[62510]: DEBUG oslo_vmware.api [None req-99984399-3df5-42b4-9b86-1a4b7879324f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768828, 'name': PowerOffVM_Task, 'duration_secs': 0.222194} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1635.909393] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-99984399-3df5-42b4-9b86-1a4b7879324f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1635.909393] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-99984399-3df5-42b4-9b86-1a4b7879324f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1635.909393] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2a2336fb-af26-446a-8a35-0346f672a743 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.121014] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768831, 'name': CreateVM_Task, 'duration_secs': 0.449198} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1636.121214] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1636.121657] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1636.121814] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1636.122525] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1636.122525] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-035bd048-996f-4cc2-a6d2-abb34b01c0cd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.137898] env[62510]: DEBUG oslo_vmware.api [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Waiting for the task: (returnval){ [ 1636.137898] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5256e2ea-a4b5-a7bb-e37e-a851c19bd60a" [ 1636.137898] env[62510]: _type = "Task" [ 1636.137898] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1636.146331] env[62510]: DEBUG oslo_concurrency.lockutils [None req-daaf4048-a186-46d5-8731-1089c883ed22 tempest-ServersTestManualDisk-859263968 tempest-ServersTestManualDisk-859263968-project-member] Lock "612e95d6-28ef-4c9a-b5d9-fd83122bfa44" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.832s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1636.157308] env[62510]: DEBUG oslo_vmware.api [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5256e2ea-a4b5-a7bb-e37e-a851c19bd60a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.179264] env[62510]: DEBUG oslo_vmware.api [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768827, 'name': ReconfigVM_Task, 'duration_secs': 0.519968} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1636.179264] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-99984399-3df5-42b4-9b86-1a4b7879324f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1636.179264] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-99984399-3df5-42b4-9b86-1a4b7879324f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1636.179264] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-99984399-3df5-42b4-9b86-1a4b7879324f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Deleting the datastore file [datastore1] 3df19233-2448-4030-ae1d-a4f98ccffba9 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1636.179554] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Reconfigured VM instance instance-00000015 to attach disk [datastore1] e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7/e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1636.180308] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-27efc373-605e-442d-a02a-bcb0d92952bc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.182178] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8d5e01a8-d68b-4863-b009-de8a94cddb60 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.191106] env[62510]: DEBUG oslo_vmware.api [None req-99984399-3df5-42b4-9b86-1a4b7879324f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for the task: (returnval){ [ 1636.191106] env[62510]: value = "task-1768834" [ 1636.191106] env[62510]: _type = "Task" [ 1636.191106] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1636.195026] env[62510]: DEBUG oslo_vmware.api [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1636.195026] env[62510]: value = "task-1768833" [ 1636.195026] env[62510]: _type = "Task" [ 1636.195026] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1636.216470] env[62510]: DEBUG oslo_vmware.api [None req-99984399-3df5-42b4-9b86-1a4b7879324f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768834, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.216470] env[62510]: DEBUG oslo_vmware.api [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768833, 'name': Rename_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.393692] env[62510]: DEBUG nova.network.neutron [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Updating instance_info_cache with network_info: [{"id": "8597503b-d757-44ff-91a7-6f52b3b75aa3", "address": "fa:16:3e:bd:11:3e", "network": {"id": "bf59f5d9-5154-4120-9edd-03529b552382", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2003015829-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e144c0bd2d124193a65ad53de8c43039", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8597503b-d7", "ovs_interfaceid": "8597503b-d757-44ff-91a7-6f52b3b75aa3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1636.644338] env[62510]: DEBUG oslo_vmware.rw_handles [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52dc610c-ebfd-5364-d3ee-3fef4f922551/disk-0.vmdk. {{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1636.647906] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a590721-b1e5-40f9-8851-d500a14a5a17 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.657932] env[62510]: DEBUG oslo_vmware.rw_handles [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52dc610c-ebfd-5364-d3ee-3fef4f922551/disk-0.vmdk is in state: ready. {{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1636.657932] env[62510]: ERROR oslo_vmware.rw_handles [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52dc610c-ebfd-5364-d3ee-3fef4f922551/disk-0.vmdk due to incomplete transfer. 
[ 1636.662071] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-329481fd-c666-4303-ae8b-514a4d8f20ef {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.664353] env[62510]: DEBUG oslo_vmware.api [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5256e2ea-a4b5-a7bb-e37e-a851c19bd60a, 'name': SearchDatastore_Task, 'duration_secs': 0.021598} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1636.664947] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1636.665541] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1636.665804] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1636.666699] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1636.666699] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1636.670178] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-71bd8c71-9a2e-4360-88b4-fba9d7ce963e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.677232] env[62510]: DEBUG oslo_vmware.rw_handles [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52dc610c-ebfd-5364-d3ee-3fef4f922551/disk-0.vmdk. 
{{(pid=62510) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1636.677440] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Uploaded image ad780d0f-1ac1-44e1-9cf6-81c91b73810c to the Glance image server {{(pid=62510) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1636.679814] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Destroying the VM {{(pid=62510) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1636.680845] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-1ba07936-adf8-4949-a92e-a8a65a962e7b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.683838] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1636.683838] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1636.685578] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13560a67-d1c1-4831-b363-0bf1787eb636 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.692823] env[62510]: DEBUG oslo_vmware.api [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Waiting for the task: (returnval){ [ 1636.692823] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52920182-1ea4-cbf4-a7f7-36b0abaff3f6" [ 1636.692823] env[62510]: _type = "Task" [ 1636.692823] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1636.701035] env[62510]: DEBUG oslo_vmware.api [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1636.701035] env[62510]: value = "task-1768835" [ 1636.701035] env[62510]: _type = "Task" [ 1636.701035] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1636.716916] env[62510]: DEBUG oslo_vmware.api [None req-99984399-3df5-42b4-9b86-1a4b7879324f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768834, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.290082} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1636.717821] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-99984399-3df5-42b4-9b86-1a4b7879324f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1636.717994] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-99984399-3df5-42b4-9b86-1a4b7879324f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1636.718450] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-99984399-3df5-42b4-9b86-1a4b7879324f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1636.718450] env[62510]: INFO nova.compute.manager [None req-99984399-3df5-42b4-9b86-1a4b7879324f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Took 1.34 seconds to destroy the instance on the hypervisor. [ 1636.718985] env[62510]: DEBUG oslo.service.loopingcall [None req-99984399-3df5-42b4-9b86-1a4b7879324f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1636.722162] env[62510]: DEBUG nova.compute.manager [-] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1636.722260] env[62510]: DEBUG nova.network.neutron [-] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1636.723868] env[62510]: DEBUG oslo_vmware.api [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52920182-1ea4-cbf4-a7f7-36b0abaff3f6, 'name': SearchDatastore_Task, 'duration_secs': 0.011175} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1636.735018] env[62510]: DEBUG oslo_vmware.api [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768833, 'name': Rename_Task, 'duration_secs': 0.23001} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1636.735018] env[62510]: DEBUG oslo_vmware.api [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768835, 'name': Destroy_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.735018] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b40b8465-c219-4bd2-aa03-365b5b39dbbc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.736450] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1636.736695] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9b3c723a-1822-4e6c-adbd-25929df24d8e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.742946] env[62510]: DEBUG oslo_vmware.api [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Waiting for the task: (returnval){ [ 1636.742946] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52b6bcca-480e-1252-44a4-337d59ffc101" [ 1636.742946] env[62510]: _type = "Task" [ 1636.742946] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1636.745627] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19a8aa86-338a-49ba-8f85-d01151f62693 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.751082] env[62510]: DEBUG oslo_vmware.api [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1636.751082] env[62510]: value = "task-1768836" [ 1636.751082] env[62510]: _type = "Task" [ 1636.751082] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1636.759837] env[62510]: DEBUG oslo_vmware.api [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52b6bcca-480e-1252-44a4-337d59ffc101, 'name': SearchDatastore_Task, 'duration_secs': 0.010682} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1636.761203] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e840ef64-4c46-4895-9362-ac21f72cc49b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.764552] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1636.764822] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7/4d622ed5-5f6f-46ca-bc4a-efb32f452cb7.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1636.769070] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-99eac15e-91b6-4e05-a4d7-d57ebfba9c4f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.769906] env[62510]: DEBUG oslo_vmware.api [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768836, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.799457] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06708bcf-7f9b-4463-8428-a5901cc222a8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.804270] env[62510]: DEBUG oslo_vmware.api [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Waiting for the task: (returnval){ [ 1636.804270] env[62510]: value = "task-1768837" [ 1636.804270] env[62510]: _type = "Task" [ 1636.804270] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1636.812379] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aa77b79-2f84-42ab-acde-807f659d9c28 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.819424] env[62510]: DEBUG oslo_vmware.api [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Task: {'id': task-1768837, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.829979] env[62510]: DEBUG nova.compute.provider_tree [None req-0da0e7d6-938e-48c2-98c4-43f87173c67c tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1636.899730] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Releasing lock "refresh_cache-350d5f83-d9ce-4997-bf57-70c4a4e22ba0" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1637.217871] env[62510]: DEBUG oslo_vmware.api [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768835, 'name': Destroy_Task} progress is 33%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.264914] env[62510]: DEBUG oslo_vmware.api [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768836, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.315682] env[62510]: DEBUG oslo_vmware.api [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Task: {'id': task-1768837, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.335851] env[62510]: DEBUG nova.scheduler.client.report [None req-0da0e7d6-938e-48c2-98c4-43f87173c67c tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1637.427227] env[62510]: DEBUG nova.compute.manager [req-1a80a88b-e22e-4cfa-bc57-dd2ad0eff3dc req-569a4658-00bf-4720-bfb5-eb8254ecb969 service nova] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Received event network-vif-deleted-6e3b34de-9819-4bbf-8565-8fd4f61417d3 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1637.427620] env[62510]: INFO nova.compute.manager [req-1a80a88b-e22e-4cfa-bc57-dd2ad0eff3dc req-569a4658-00bf-4720-bfb5-eb8254ecb969 service nova] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Neutron deleted interface 6e3b34de-9819-4bbf-8565-8fd4f61417d3; detaching it from the instance and deleting it from the info cache [ 1637.428216] env[62510]: DEBUG nova.network.neutron [req-1a80a88b-e22e-4cfa-bc57-dd2ad0eff3dc req-569a4658-00bf-4720-bfb5-eb8254ecb969 service nova] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1637.444077] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38b8047b-edd3-4357-916f-2dad82c096ee {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.470159] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbd08f95-b3dd-4191-aa0d-d13d7260c00a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.486241] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Updating instance '350d5f83-d9ce-4997-bf57-70c4a4e22ba0' progress to 83 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1637.719479] env[62510]: DEBUG oslo_vmware.api [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768835, 'name': Destroy_Task, 'duration_secs': 0.758991} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1637.719775] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Destroyed the VM [ 1637.720059] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Deleting Snapshot of the VM instance {{(pid=62510) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1637.720348] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-cb1dd29a-ef38-4a18-a74e-2b972792cfb3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.727944] env[62510]: DEBUG oslo_vmware.api [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1637.727944] env[62510]: value = "task-1768838" [ 1637.727944] env[62510]: _type = "Task" [ 1637.727944] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1637.736213] env[62510]: DEBUG oslo_vmware.api [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768838, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.764264] env[62510]: DEBUG oslo_vmware.api [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768836, 'name': PowerOnVM_Task, 'duration_secs': 0.855059} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1637.765266] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1637.765424] env[62510]: DEBUG nova.compute.manager [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1637.766277] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fd6c02d-b10b-4f1e-86bb-22c9823bb5ff {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.780441] env[62510]: DEBUG nova.network.neutron [-] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1637.814465] env[62510]: DEBUG oslo_vmware.api [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Task: {'id': task-1768837, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.649671} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1637.815366] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7/4d622ed5-5f6f-46ca-bc4a-efb32f452cb7.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1637.815639] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1637.816092] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9d9e6ec7-f2ee-4c21-9c7f-8f840f9dab2e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.825730] env[62510]: DEBUG oslo_vmware.api [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Waiting for the task: (returnval){ [ 1637.825730] env[62510]: value = "task-1768839" [ 1637.825730] env[62510]: _type = "Task" [ 1637.825730] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1637.836929] env[62510]: DEBUG oslo_vmware.api [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Task: {'id': task-1768839, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.840838] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0da0e7d6-938e-48c2-98c4-43f87173c67c tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.238s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1637.846227] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.761s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1637.847824] env[62510]: INFO nova.compute.claims [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1637.875146] env[62510]: INFO nova.scheduler.client.report [None req-0da0e7d6-938e-48c2-98c4-43f87173c67c tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Deleted allocations for instance 9d5d29ea-be92-4881-9fc8-fea3f2f442d0 [ 1637.931541] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d34232a5-7192-4234-b7b8-a7ab556230d4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.944920] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25153b25-79eb-4107-b339-71541398bb2f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.986508] env[62510]: DEBUG nova.compute.manager [req-1a80a88b-e22e-4cfa-bc57-dd2ad0eff3dc req-569a4658-00bf-4720-bfb5-eb8254ecb969 service nova] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Detach interface failed, port_id=6e3b34de-9819-4bbf-8565-8fd4f61417d3, reason: Instance 3df19233-2448-4030-ae1d-a4f98ccffba9 could not be found. 
{{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1637.995571] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1637.995872] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3608218f-edad-40a9-9012-3e6624510cc6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.004302] env[62510]: DEBUG oslo_vmware.api [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1638.004302] env[62510]: value = "task-1768840" [ 1638.004302] env[62510]: _type = "Task" [ 1638.004302] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1638.013986] env[62510]: DEBUG oslo_vmware.api [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768840, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.239306] env[62510]: DEBUG oslo_vmware.api [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768838, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.283529] env[62510]: INFO nova.compute.manager [-] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Took 1.56 seconds to deallocate network for instance. [ 1638.284944] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1638.339127] env[62510]: DEBUG oslo_vmware.api [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Task: {'id': task-1768839, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06803} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1638.339454] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1638.340520] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d0a7938-f1d2-43fa-9944-1f30c61ce35f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.367566] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7/4d622ed5-5f6f-46ca-bc4a-efb32f452cb7.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1638.369554] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7c324b2b-c65c-4216-95d4-1b317bb73d5f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.395628] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0da0e7d6-938e-48c2-98c4-43f87173c67c tempest-ServersTestJSON-781763476 tempest-ServersTestJSON-781763476-project-member] Lock "9d5d29ea-be92-4881-9fc8-fea3f2f442d0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.334s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1638.402612] env[62510]: DEBUG oslo_vmware.api [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Waiting for the task: (returnval){ [ 1638.402612] env[62510]: value = "task-1768841" [ 1638.402612] env[62510]: _type = "Task" [ 1638.402612] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1638.516010] env[62510]: DEBUG oslo_vmware.api [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768840, 'name': PowerOnVM_Task, 'duration_secs': 0.397247} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1638.516322] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1638.516500] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5e3bb7b0-c873-4a02-824b-f30924541c5f tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Updating instance '350d5f83-d9ce-4997-bf57-70c4a4e22ba0' progress to 100 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1638.597444] env[62510]: DEBUG oslo_concurrency.lockutils [None req-aa702396-c40d-41da-9c04-d24ae2af5312 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquiring lock "d3e25d50-f315-439b-9e9f-8e454a0631d4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1638.597711] env[62510]: DEBUG oslo_concurrency.lockutils [None req-aa702396-c40d-41da-9c04-d24ae2af5312 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "d3e25d50-f315-439b-9e9f-8e454a0631d4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1638.597919] env[62510]: DEBUG oslo_concurrency.lockutils [None req-aa702396-c40d-41da-9c04-d24ae2af5312 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquiring lock "d3e25d50-f315-439b-9e9f-8e454a0631d4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1638.598194] env[62510]: DEBUG oslo_concurrency.lockutils [None req-aa702396-c40d-41da-9c04-d24ae2af5312 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "d3e25d50-f315-439b-9e9f-8e454a0631d4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1638.598384] env[62510]: DEBUG oslo_concurrency.lockutils [None req-aa702396-c40d-41da-9c04-d24ae2af5312 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "d3e25d50-f315-439b-9e9f-8e454a0631d4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1638.600417] env[62510]: INFO nova.compute.manager [None req-aa702396-c40d-41da-9c04-d24ae2af5312 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Terminating instance [ 1638.744703] env[62510]: DEBUG oslo_vmware.api [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] 
Task: {'id': task-1768838, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.789973] env[62510]: DEBUG oslo_concurrency.lockutils [None req-99984399-3df5-42b4-9b86-1a4b7879324f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1638.913203] env[62510]: DEBUG oslo_vmware.api [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Task: {'id': task-1768841, 'name': ReconfigVM_Task, 'duration_secs': 0.378014} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1638.913491] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Reconfigured VM instance instance-0000003d to attach disk [datastore1] 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7/4d622ed5-5f6f-46ca-bc4a-efb32f452cb7.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1638.914116] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-12620e7c-8aa5-47f7-9b6c-b2e53bb8ccc4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.924054] env[62510]: DEBUG oslo_vmware.api [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Waiting for the task: (returnval){ [ 1638.924054] env[62510]: value = "task-1768842" [ 1638.924054] env[62510]: _type = "Task" [ 1638.924054] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1638.939407] env[62510]: DEBUG oslo_vmware.api [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Task: {'id': task-1768842, 'name': Rename_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.105491] env[62510]: DEBUG nova.compute.manager [None req-aa702396-c40d-41da-9c04-d24ae2af5312 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1639.105706] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-aa702396-c40d-41da-9c04-d24ae2af5312 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1639.106890] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e4d8268-cebc-4603-ae1e-88021bc6baf8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.120904] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-aa702396-c40d-41da-9c04-d24ae2af5312 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1639.121197] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6c4ea99c-6ecf-4a32-9661-0d3eb13f3c77 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.254265] env[62510]: DEBUG oslo_vmware.api [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768838, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.317477] env[62510]: DEBUG oslo_concurrency.lockutils [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquiring lock "16b5d928-94fe-4fd5-9909-775c28d7edd2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1639.317702] env[62510]: DEBUG oslo_concurrency.lockutils [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "16b5d928-94fe-4fd5-9909-775c28d7edd2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1639.363637] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39843954-74c8-4aca-9ba2-097430ca525e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.376602] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc01de35-d909-40f4-b9bf-be7f12ef4380 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.420249] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-388b7e27-b102-42a0-af38-3e2954ffea04 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.432018] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f151892-221e-4678-976c-1045306d1690 
{{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.439218] env[62510]: DEBUG oslo_vmware.api [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Task: {'id': task-1768842, 'name': Rename_Task, 'duration_secs': 0.226474} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1639.439885] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1639.440164] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-84580985-9380-4ce0-a263-2aee1f3a3309 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.455250] env[62510]: DEBUG nova.compute.provider_tree [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1639.458020] env[62510]: DEBUG oslo_vmware.api [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Waiting for the task: (returnval){ [ 1639.458020] env[62510]: value = "task-1768844" [ 1639.458020] env[62510]: _type = "Task" [ 1639.458020] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1639.471353] env[62510]: DEBUG oslo_vmware.api [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Task: {'id': task-1768844, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.746407] env[62510]: DEBUG oslo_vmware.api [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768838, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.820188] env[62510]: DEBUG nova.compute.manager [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1639.976991] env[62510]: DEBUG oslo_vmware.api [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Task: {'id': task-1768844, 'name': PowerOnVM_Task, 'duration_secs': 0.506788} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1639.977537] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1639.977847] env[62510]: INFO nova.compute.manager [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Took 4.50 seconds to spawn the instance on the hypervisor. [ 1639.978173] env[62510]: DEBUG nova.compute.manager [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1639.979094] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84a20464-184b-4515-9186-4626984eb1a9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.984840] env[62510]: ERROR nova.scheduler.client.report [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [req-4011d877-571a-4110-bba4-d5edd9bfe842] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c3653102-341b-4ed1-8b1f-1abaf8aa3e56. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-4011d877-571a-4110-bba4-d5edd9bfe842"}]} [ 1640.009232] env[62510]: DEBUG nova.scheduler.client.report [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Refreshing inventories for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1640.030542] env[62510]: DEBUG nova.scheduler.client.report [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Updating ProviderTree inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1640.030832] env[62510]: DEBUG nova.compute.provider_tree [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1640.058932] env[62510]: DEBUG nova.scheduler.client.report [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Refreshing aggregate associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, aggregates: None {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1640.091642] env[62510]: DEBUG nova.scheduler.client.report [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Refreshing trait associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1640.247541] env[62510]: DEBUG oslo_vmware.api [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768838, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.343405] env[62510]: DEBUG oslo_concurrency.lockutils [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1640.500801] env[62510]: INFO nova.compute.manager [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Took 42.82 seconds to build instance. [ 1640.566043] env[62510]: INFO nova.compute.manager [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Rebuilding instance [ 1640.659056] env[62510]: DEBUG nova.compute.manager [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1640.664674] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdcb7a50-ff2b-4319-80e3-8003e703a48b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.696892] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a7d1329-f7c6-4428-88f2-cc924ee2d81b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.715347] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a19ee313-da3c-45cf-88ff-d70fd9729197 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.781406] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8de9a39-2f47-4906-b31e-9430e59e1fc9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.793691] env[62510]: DEBUG oslo_vmware.api [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768838, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.794932] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0728092-2f91-4e98-9d19-df162f0690e4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.811583] env[62510]: DEBUG nova.compute.provider_tree [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1641.004908] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3166bdb0-f081-49e0-821a-5b82305432b8 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Lock "4d622ed5-5f6f-46ca-bc4a-efb32f452cb7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.335s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1641.208108] env[62510]: DEBUG oslo_concurrency.lockutils [None req-31d48dca-21c2-4cef-b941-496c590f6956 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "350d5f83-d9ce-4997-bf57-70c4a4e22ba0" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1641.208108] env[62510]: DEBUG oslo_concurrency.lockutils [None req-31d48dca-21c2-4cef-b941-496c590f6956 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "350d5f83-d9ce-4997-bf57-70c4a4e22ba0" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1641.208108] env[62510]: DEBUG nova.compute.manager [None req-31d48dca-21c2-4cef-b941-496c590f6956 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Going to confirm migration 3 {{(pid=62510) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5157}} [ 1641.288946] env[62510]: DEBUG oslo_vmware.api [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768838, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.352477] env[62510]: DEBUG nova.scheduler.client.report [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Updated inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with generation 87 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1641.352618] env[62510]: DEBUG nova.compute.provider_tree [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Updating resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 generation from 87 to 88 during operation: update_inventory {{(pid=62510) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1641.353175] env[62510]: DEBUG nova.compute.provider_tree [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1641.652109] env[62510]: INFO nova.compute.manager [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Rebuilding instance [ 1641.683384] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1641.683700] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-767f0dd1-3ccd-4be7-a712-76ce50699925 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.693069] env[62510]: DEBUG oslo_vmware.api [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1641.693069] env[62510]: value = "task-1768845" [ 1641.693069] env[62510]: _type = "Task" [ 1641.693069] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1641.709147] env[62510]: DEBUG oslo_vmware.api [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768845, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.721037] env[62510]: DEBUG nova.compute.manager [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1641.723557] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-534080c6-9ab5-4f79-8700-4303908f7b84 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.801912] env[62510]: DEBUG oslo_vmware.api [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768838, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.834823] env[62510]: DEBUG oslo_concurrency.lockutils [None req-31d48dca-21c2-4cef-b941-496c590f6956 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "refresh_cache-350d5f83-d9ce-4997-bf57-70c4a4e22ba0" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1641.835015] env[62510]: DEBUG oslo_concurrency.lockutils [None req-31d48dca-21c2-4cef-b941-496c590f6956 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquired lock "refresh_cache-350d5f83-d9ce-4997-bf57-70c4a4e22ba0" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1641.835199] env[62510]: DEBUG nova.network.neutron [None req-31d48dca-21c2-4cef-b941-496c590f6956 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1641.835383] env[62510]: DEBUG nova.objects.instance [None req-31d48dca-21c2-4cef-b941-496c590f6956 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lazy-loading 'info_cache' on Instance uuid 350d5f83-d9ce-4997-bf57-70c4a4e22ba0 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1641.859244] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.016s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1641.862114] env[62510]: DEBUG nova.compute.manager [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] 
[instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1641.866399] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.028s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1641.868903] env[62510]: INFO nova.compute.claims [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1642.204042] env[62510]: DEBUG oslo_vmware.api [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768845, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.212274] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Acquiring lock "c829d602-97bc-4ec8-9090-c63bed04ac79" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1642.212708] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Lock "c829d602-97bc-4ec8-9090-c63bed04ac79" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1642.290383] env[62510]: DEBUG oslo_vmware.api [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768838, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.368270] env[62510]: DEBUG nova.compute.utils [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1642.369726] env[62510]: DEBUG nova.compute.manager [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1642.369816] env[62510]: DEBUG nova.network.neutron [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1642.435283] env[62510]: DEBUG nova.policy [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6decc076b3da4d1b86c6aa73f1cf2674', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '86abf24d608d4c438161dc0b8335dea1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1642.708405] env[62510]: DEBUG oslo_vmware.api [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768845, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.718153] env[62510]: DEBUG nova.compute.manager [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1642.746337] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1642.746888] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9b52fb18-4a6c-4cfc-8c76-39c384107741 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.759756] env[62510]: DEBUG oslo_vmware.api [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Waiting for the task: (returnval){ [ 1642.759756] env[62510]: value = "task-1768846" [ 1642.759756] env[62510]: _type = "Task" [ 1642.759756] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1642.770242] env[62510]: DEBUG oslo_vmware.api [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Task: {'id': task-1768846, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.791582] env[62510]: DEBUG oslo_vmware.api [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768838, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.878406] env[62510]: DEBUG nova.compute.manager [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1642.928078] env[62510]: DEBUG nova.network.neutron [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Successfully created port: 1e4fadb9-6725-488d-9382-0ca6b5eb0b23 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1643.213631] env[62510]: DEBUG nova.network.neutron [None req-31d48dca-21c2-4cef-b941-496c590f6956 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Updating instance_info_cache with network_info: [{"id": "8597503b-d757-44ff-91a7-6f52b3b75aa3", "address": "fa:16:3e:bd:11:3e", "network": {"id": "bf59f5d9-5154-4120-9edd-03529b552382", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2003015829-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e144c0bd2d124193a65ad53de8c43039", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8597503b-d7", "ovs_interfaceid": "8597503b-d757-44ff-91a7-6f52b3b75aa3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1643.215389] env[62510]: DEBUG oslo_vmware.api [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768845, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.245976] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1643.275943] env[62510]: DEBUG oslo_vmware.api [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Task: {'id': task-1768846, 'name': PowerOffVM_Task, 'duration_secs': 0.135795} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.276405] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1643.276925] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1643.277702] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77b172ae-2aa3-4276-9bc7-30fb84ca13cf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.293623] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1643.296606] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bb163ac6-c6ca-491f-99d5-507f33bf30ba {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.298361] env[62510]: DEBUG oslo_vmware.api [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768838, 'name': RemoveSnapshot_Task, 'duration_secs': 5.53071} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.300901] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Deleted Snapshot of the VM instance {{(pid=62510) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1643.301177] env[62510]: INFO nova.compute.manager [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Took 20.25 seconds to snapshot the instance on the hypervisor. 
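Note: the PowerOffVM_Task / UnregisterVM / DeleteDatastoreFile_Task sequences above all follow the same oslo.vmware pattern: the driver invokes a vCenter *_Task method, gets back a task managed-object reference, and wait_for_task polls it until it reaches a terminal state, which is what produces the repeated "progress is N%" lines and the final "completed successfully" entries. Below is a minimal sketch of that pattern, assuming oslo.vmware's VMwareAPISession API; the host, credentials and vm_ref are placeholders, and the argument names reflect my recollection of the library's signature, so verify them against the installed version.

from oslo_vmware import api

# Placeholders: host and credentials are not taken from this log.
# By default the session logs in to vCenter when it is constructed.
session = api.VMwareAPISession(
    'vc1.example.test',
    'administrator@vsphere.local',
    'secret',
    api_retry_count=10,        # how many times failed API calls are retried
    task_poll_interval=0.5)    # seconds between task progress polls

def power_off(vm_ref):
    # PowerOffVM_Task returns a task managed-object reference;
    # wait_for_task polls it (the source of the "progress is N%" updates)
    # and raises if the task ends in an error state.
    task_ref = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    return session.wait_for_task(task_ref)

Nova's vmwareapi driver layers instance-state bookkeeping and retries on top of this, but the polling loop reflected in the log entries above is the same wait_for_task call.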
[ 1643.408417] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1643.408417] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1643.408417] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Deleting the datastore file [datastore1] 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1643.408417] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-886590ec-3998-4be0-958a-8ecd8a30d47c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.423030] env[62510]: DEBUG oslo_vmware.api [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Waiting for the task: (returnval){ [ 1643.423030] env[62510]: value = "task-1768848" [ 1643.423030] env[62510]: _type = "Task" [ 1643.423030] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.426550] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-aa702396-c40d-41da-9c04-d24ae2af5312 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1643.426782] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-aa702396-c40d-41da-9c04-d24ae2af5312 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1643.427181] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa702396-c40d-41da-9c04-d24ae2af5312 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Deleting the datastore file [datastore1] d3e25d50-f315-439b-9e9f-8e454a0631d4 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1643.427865] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9fc1c4df-9827-4051-b6af-7280d836bba5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.438217] env[62510]: DEBUG oslo_vmware.api [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Task: {'id': task-1768848, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.445290] env[62510]: DEBUG oslo_vmware.api [None req-aa702396-c40d-41da-9c04-d24ae2af5312 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1643.445290] env[62510]: value = "task-1768849" [ 1643.445290] env[62510]: _type = "Task" [ 1643.445290] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.456126] env[62510]: DEBUG oslo_vmware.api [None req-aa702396-c40d-41da-9c04-d24ae2af5312 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768849, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.531118] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc73b4ff-4f86-42c3-b328-f2085cf99a24 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.541653] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb1fc9e2-3765-430b-8adf-6e64325ed764 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.581081] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eb51921-0e4f-4d98-b7e2-daddeb7320b8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.590451] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9389c5e4-21bb-4116-b86e-d817f6aefbbf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.611246] env[62510]: DEBUG nova.compute.provider_tree [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1643.707699] env[62510]: DEBUG oslo_vmware.api [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768845, 'name': PowerOffVM_Task, 'duration_secs': 1.747862} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.707905] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1643.708222] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1643.708989] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ac7d958-a66a-41ce-975b-8dd268cf46be {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.718490] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1643.718490] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a448412b-09a7-47e4-bfaf-8fee83145e7d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.719794] env[62510]: DEBUG oslo_concurrency.lockutils [None req-31d48dca-21c2-4cef-b941-496c590f6956 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Releasing lock "refresh_cache-350d5f83-d9ce-4997-bf57-70c4a4e22ba0" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1643.720038] env[62510]: DEBUG nova.objects.instance [None req-31d48dca-21c2-4cef-b941-496c590f6956 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lazy-loading 'migration_context' on Instance uuid 350d5f83-d9ce-4997-bf57-70c4a4e22ba0 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1643.794618] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1643.794862] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1643.795070] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Deleting the datastore file [datastore1] e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 
1643.795354] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9f4613b3-7963-402e-8497-7e5575cb9892 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.802562] env[62510]: DEBUG oslo_vmware.api [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1643.802562] env[62510]: value = "task-1768851" [ 1643.802562] env[62510]: _type = "Task" [ 1643.802562] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.807405] env[62510]: DEBUG nova.compute.manager [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Instance disappeared during snapshot {{(pid=62510) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 1643.817144] env[62510]: DEBUG oslo_vmware.api [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768851, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.824463] env[62510]: DEBUG nova.compute.manager [None req-5b40b9a2-7abf-41aa-81d9-c12897cbc57b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Image not found during clean up ad780d0f-1ac1-44e1-9cf6-81c91b73810c {{(pid=62510) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4601}} [ 1643.894866] env[62510]: DEBUG nova.compute.manager [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1643.933905] env[62510]: DEBUG oslo_vmware.api [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Task: {'id': task-1768848, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.126399} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.937571] env[62510]: DEBUG nova.virt.hardware [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1643.937571] env[62510]: DEBUG nova.virt.hardware [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1643.937571] env[62510]: DEBUG nova.virt.hardware [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1643.937571] env[62510]: DEBUG nova.virt.hardware [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1643.937571] env[62510]: DEBUG nova.virt.hardware [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1643.938463] env[62510]: DEBUG nova.virt.hardware [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1643.939139] env[62510]: DEBUG nova.virt.hardware [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1643.939139] env[62510]: DEBUG nova.virt.hardware [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1643.939139] env[62510]: DEBUG 
nova.virt.hardware [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1643.939396] env[62510]: DEBUG nova.virt.hardware [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1643.941694] env[62510]: DEBUG nova.virt.hardware [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1643.941694] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1643.941694] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1643.941694] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1643.948185] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-695e9306-b9c0-49d9-ac1d-a3ed8e056cde {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.959792] env[62510]: DEBUG oslo_vmware.api [None req-aa702396-c40d-41da-9c04-d24ae2af5312 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768849, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.178348} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.962303] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa702396-c40d-41da-9c04-d24ae2af5312 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1643.962533] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-aa702396-c40d-41da-9c04-d24ae2af5312 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1643.962709] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-aa702396-c40d-41da-9c04-d24ae2af5312 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1643.962895] env[62510]: INFO nova.compute.manager [None req-aa702396-c40d-41da-9c04-d24ae2af5312 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Took 4.86 seconds to destroy the instance on the hypervisor. [ 1643.964251] env[62510]: DEBUG oslo.service.loopingcall [None req-aa702396-c40d-41da-9c04-d24ae2af5312 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1643.964644] env[62510]: DEBUG nova.compute.manager [-] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1643.964839] env[62510]: DEBUG nova.network.neutron [-] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1643.967827] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cd66c17-3c38-400b-b5f0-212927d3dbde {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.138344] env[62510]: ERROR nova.scheduler.client.report [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [req-852d2781-8c59-4fb4-9804-de0066b2ef69] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c3653102-341b-4ed1-8b1f-1abaf8aa3e56. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-852d2781-8c59-4fb4-9804-de0066b2ef69"}]} [ 1644.161616] env[62510]: DEBUG nova.scheduler.client.report [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Refreshing inventories for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1644.185157] env[62510]: DEBUG nova.scheduler.client.report [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Updating ProviderTree inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1644.185157] env[62510]: DEBUG nova.compute.provider_tree [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1644.203974] env[62510]: DEBUG nova.scheduler.client.report [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Refreshing aggregate associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, aggregates: None {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1644.223666] env[62510]: DEBUG nova.objects.base [None req-31d48dca-21c2-4cef-b941-496c590f6956 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Object Instance<350d5f83-d9ce-4997-bf57-70c4a4e22ba0> lazy-loaded attributes: info_cache,migration_context {{(pid=62510) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1644.227027] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c41d0315-c965-4684-961e-1fa872d89bd5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.252333] env[62510]: DEBUG nova.scheduler.client.report [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Refreshing trait associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, traits: 
COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1644.254423] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19fc6eeb-b337-4d9b-b9cd-f49c5d49f302 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.261168] env[62510]: DEBUG oslo_vmware.api [None req-31d48dca-21c2-4cef-b941-496c590f6956 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1644.261168] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52283575-4a5d-37f9-d1ea-750c6af9f196" [ 1644.261168] env[62510]: _type = "Task" [ 1644.261168] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.270688] env[62510]: DEBUG oslo_vmware.api [None req-31d48dca-21c2-4cef-b941-496c590f6956 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52283575-4a5d-37f9-d1ea-750c6af9f196, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.315289] env[62510]: DEBUG oslo_vmware.api [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768851, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.264425} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.315588] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1644.316150] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1644.316560] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1644.467125] env[62510]: DEBUG nova.compute.manager [req-6d703ca4-a452-44e4-b890-6baca193628e req-c953b94a-7cc6-4ff5-b24b-b4f727e9743a service nova] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Received event network-vif-deleted-28687f38-1a1a-40ad-ad64-c571d7a7dbe3 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1644.467342] env[62510]: INFO nova.compute.manager [req-6d703ca4-a452-44e4-b890-6baca193628e req-c953b94a-7cc6-4ff5-b24b-b4f727e9743a service nova] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Neutron deleted interface 28687f38-1a1a-40ad-ad64-c571d7a7dbe3; detaching it from the instance and deleting it from the info cache [ 1644.467517] env[62510]: DEBUG nova.network.neutron [req-6d703ca4-a452-44e4-b890-6baca193628e req-c953b94a-7cc6-4ff5-b24b-b4f727e9743a service nova] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1644.757456] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31b5670d-d265-4e90-bc2d-9719a834e2a5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.768248] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e72928dc-f001-4ae2-873d-96ff3e27a5d7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.774803] env[62510]: DEBUG oslo_vmware.api [None req-31d48dca-21c2-4cef-b941-496c590f6956 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52283575-4a5d-37f9-d1ea-750c6af9f196, 'name': SearchDatastore_Task, 'duration_secs': 0.008516} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.775401] env[62510]: DEBUG oslo_concurrency.lockutils [None req-31d48dca-21c2-4cef-b941-496c590f6956 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1644.805258] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34a4cb05-30d4-4018-a9ed-d7e82d7d5e85 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.814335] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d339813c-76c7-4055-bd4f-b1d1378b688b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.579019] env[62510]: DEBUG nova.network.neutron [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Successfully updated port: 1e4fadb9-6725-488d-9382-0ca6b5eb0b23 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1645.587021] env[62510]: DEBUG nova.network.neutron [-] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1645.588303] env[62510]: DEBUG nova.compute.manager [req-ec7f15cb-0d97-4c59-a450-b68715be139b req-79f9533a-f160-47e6-8c51-6b7b6c09190e service nova] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Received event network-vif-plugged-1e4fadb9-6725-488d-9382-0ca6b5eb0b23 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1645.588548] env[62510]: DEBUG oslo_concurrency.lockutils [req-ec7f15cb-0d97-4c59-a450-b68715be139b req-79f9533a-f160-47e6-8c51-6b7b6c09190e service nova] Acquiring lock "90869287-22bd-438c-8684-56f5d43e3ca8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1645.588670] env[62510]: DEBUG oslo_concurrency.lockutils [req-ec7f15cb-0d97-4c59-a450-b68715be139b req-79f9533a-f160-47e6-8c51-6b7b6c09190e service nova] Lock "90869287-22bd-438c-8684-56f5d43e3ca8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1645.588877] env[62510]: DEBUG oslo_concurrency.lockutils [req-ec7f15cb-0d97-4c59-a450-b68715be139b req-79f9533a-f160-47e6-8c51-6b7b6c09190e service nova] Lock "90869287-22bd-438c-8684-56f5d43e3ca8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1645.589062] env[62510]: DEBUG nova.compute.manager [req-ec7f15cb-0d97-4c59-a450-b68715be139b req-79f9533a-f160-47e6-8c51-6b7b6c09190e service nova] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] No waiting events found dispatching 
network-vif-plugged-1e4fadb9-6725-488d-9382-0ca6b5eb0b23 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1645.589244] env[62510]: WARNING nova.compute.manager [req-ec7f15cb-0d97-4c59-a450-b68715be139b req-79f9533a-f160-47e6-8c51-6b7b6c09190e service nova] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Received unexpected event network-vif-plugged-1e4fadb9-6725-488d-9382-0ca6b5eb0b23 for instance with vm_state building and task_state spawning. [ 1645.590416] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dc4c7e44-92b0-4381-b9a8-5268b465cd81 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.607081] env[62510]: DEBUG nova.compute.provider_tree [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1645.615622] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93406fc1-8d7d-46f2-a75b-89886fc7f04c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.634363] env[62510]: DEBUG nova.virt.hardware [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1645.634641] env[62510]: DEBUG nova.virt.hardware [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1645.634769] env[62510]: DEBUG nova.virt.hardware [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1645.634944] env[62510]: DEBUG nova.virt.hardware [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 
tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1645.635104] env[62510]: DEBUG nova.virt.hardware [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1645.635270] env[62510]: DEBUG nova.virt.hardware [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1645.635461] env[62510]: DEBUG nova.virt.hardware [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1645.635615] env[62510]: DEBUG nova.virt.hardware [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1645.635776] env[62510]: DEBUG nova.virt.hardware [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1645.635939] env[62510]: DEBUG nova.virt.hardware [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1645.636134] env[62510]: DEBUG nova.virt.hardware [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1645.637700] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6be05e3-1af4-4bc6-8462-edd51019c821 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.651813] env[62510]: DEBUG nova.compute.manager [req-6d703ca4-a452-44e4-b890-6baca193628e req-c953b94a-7cc6-4ff5-b24b-b4f727e9743a service nova] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Detach interface failed, port_id=28687f38-1a1a-40ad-ad64-c571d7a7dbe3, reason: Instance d3e25d50-f315-439b-9e9f-8e454a0631d4 could not be found. 
{{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1645.658345] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d602feb5-1d9d-45af-99a4-f71f5c0d5041 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.674463] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Instance VIF info [] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1645.680207] env[62510]: DEBUG oslo.service.loopingcall [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1645.680465] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1645.680673] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-69885f58-8d07-4746-83bd-6aba7193737d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.699101] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1645.699101] env[62510]: value = "task-1768852" [ 1645.699101] env[62510]: _type = "Task" [ 1645.699101] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1645.708100] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768852, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.845228] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Acquiring lock "c8e69231-2786-47ac-9a44-c194088b8079" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1645.845228] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Lock "c8e69231-2786-47ac-9a44-c194088b8079" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1646.093353] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "refresh_cache-90869287-22bd-438c-8684-56f5d43e3ca8" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1646.093518] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquired lock "refresh_cache-90869287-22bd-438c-8684-56f5d43e3ca8" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1646.093627] env[62510]: DEBUG nova.network.neutron [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1646.095058] env[62510]: INFO nova.compute.manager [-] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Took 2.13 seconds to deallocate network for instance. 
[ 1646.116454] env[62510]: DEBUG nova.virt.hardware [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1646.116682] env[62510]: DEBUG nova.virt.hardware [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1646.116836] env[62510]: DEBUG nova.virt.hardware [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1646.117062] env[62510]: DEBUG nova.virt.hardware [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1646.117224] env[62510]: DEBUG nova.virt.hardware [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1646.117373] env[62510]: DEBUG nova.virt.hardware [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1646.117682] env[62510]: DEBUG nova.virt.hardware [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1646.117761] env[62510]: DEBUG nova.virt.hardware [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1646.117899] env[62510]: DEBUG nova.virt.hardware [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 
tempest-ServersAdminTestJSON-1135699744-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1646.118039] env[62510]: DEBUG nova.virt.hardware [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1646.118239] env[62510]: DEBUG nova.virt.hardware [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1646.119348] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afe5628f-94e7-4c6b-9d8d-e26bead122a3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.127572] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb6ccf07-043b-452c-9030-a17bd3740113 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.132691] env[62510]: ERROR nova.scheduler.client.report [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [req-cda8a2fd-764c-4205-b1d7-cee12afe9f1f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c3653102-341b-4ed1-8b1f-1abaf8aa3e56. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-cda8a2fd-764c-4205-b1d7-cee12afe9f1f"}]} [ 1646.145894] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7c:5b:6c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c1b8b991-feba-44e6-900c-6486e7e122f0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '36a0ae52-841d-4fba-ab7b-a2c6ca6c6f25', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1646.153378] env[62510]: DEBUG oslo.service.loopingcall [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1646.154441] env[62510]: DEBUG nova.scheduler.client.report [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Refreshing inventories for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1646.156264] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1646.156928] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c390e4fd-74d2-4a6e-bfa9-7f3237b3d42c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.171820] env[62510]: DEBUG nova.scheduler.client.report [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Updating ProviderTree inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1646.171951] env[62510]: DEBUG nova.compute.provider_tree [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1646.180913] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1646.180913] env[62510]: value = "task-1768853" [ 1646.180913] env[62510]: _type = "Task" [ 1646.180913] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.184580] env[62510]: DEBUG nova.scheduler.client.report [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Refreshing aggregate associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, aggregates: None {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1646.192118] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768853, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.203998] env[62510]: DEBUG nova.scheduler.client.report [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Refreshing trait associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1646.212509] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768852, 'name': CreateVM_Task, 'duration_secs': 0.285839} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1646.212702] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1646.213029] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1646.213191] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1646.213571] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1646.213835] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3b5694b-8c7f-4d35-805e-c29ac6d4ff16 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.219242] env[62510]: DEBUG oslo_vmware.api [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Waiting for the task: (returnval){ [ 1646.219242] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52332674-8de0-99e6-49ff-24c96afbff6c" [ 1646.219242] env[62510]: _type = "Task" [ 1646.219242] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.230228] env[62510]: DEBUG oslo_vmware.api [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52332674-8de0-99e6-49ff-24c96afbff6c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.347599] env[62510]: DEBUG nova.compute.manager [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1646.600730] env[62510]: DEBUG oslo_concurrency.lockutils [None req-aa702396-c40d-41da-9c04-d24ae2af5312 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1646.656792] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dd0f393-580f-4eab-b241-04a74108a3ee {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.663231] env[62510]: DEBUG nova.network.neutron [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1646.667912] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7340b09-b968-44e4-8d95-eb5d54cb9005 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.708189] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de7a57dd-17bf-4604-a908-abb3ac91e990 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.719159] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffc7bb86-80d5-413c-a76f-1a453e9356fa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.722908] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768853, 'name': CreateVM_Task, 'duration_secs': 0.494123} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1646.723174] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1646.727328] env[62510]: DEBUG oslo_concurrency.lockutils [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1646.737722] env[62510]: DEBUG nova.compute.provider_tree [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1646.744766] env[62510]: DEBUG oslo_vmware.api [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52332674-8de0-99e6-49ff-24c96afbff6c, 'name': SearchDatastore_Task, 'duration_secs': 0.011805} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1646.745651] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1646.745955] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1646.746161] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1646.746311] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1646.746486] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1646.746778] env[62510]: DEBUG oslo_concurrency.lockutils [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1646.747117] env[62510]: DEBUG oslo_concurrency.lockutils [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1646.747352] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1dc2fd44-60fe-49cb-9042-383fbd994cd5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.749676] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-995de2c9-1cc0-4242-b40a-4489d5405ca0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.756378] env[62510]: DEBUG oslo_vmware.api [None req-27507b66-b661-46a7-ac54-677ca775dc8f 
tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1646.756378] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5202b729-c98a-578c-b3ce-98e92caa79b8" [ 1646.756378] env[62510]: _type = "Task" [ 1646.756378] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.760803] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1646.761025] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1646.762081] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b4db24d-9f53-4a66-a5ec-4b0aeb5bc557 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.767659] env[62510]: DEBUG oslo_vmware.api [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5202b729-c98a-578c-b3ce-98e92caa79b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.771415] env[62510]: DEBUG oslo_vmware.api [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Waiting for the task: (returnval){ [ 1646.771415] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52296c40-8ba2-8f42-f558-2c7fc3d04afe" [ 1646.771415] env[62510]: _type = "Task" [ 1646.771415] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.779868] env[62510]: DEBUG oslo_vmware.api [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52296c40-8ba2-8f42-f558-2c7fc3d04afe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.878960] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1646.941090] env[62510]: DEBUG nova.network.neutron [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Updating instance_info_cache with network_info: [{"id": "1e4fadb9-6725-488d-9382-0ca6b5eb0b23", "address": "fa:16:3e:d3:6a:cc", "network": {"id": "9b209a99-520e-436f-be97-fe37ae505518", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1482163995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86abf24d608d4c438161dc0b8335dea1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e4fadb9-67", "ovs_interfaceid": "1e4fadb9-6725-488d-9382-0ca6b5eb0b23", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1647.268188] env[62510]: DEBUG oslo_vmware.api [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5202b729-c98a-578c-b3ce-98e92caa79b8, 'name': SearchDatastore_Task, 'duration_secs': 0.017555} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.268188] env[62510]: DEBUG oslo_concurrency.lockutils [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1647.268347] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1647.268553] env[62510]: DEBUG oslo_concurrency.lockutils [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1647.275758] env[62510]: DEBUG nova.scheduler.client.report [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Updated inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with generation 90 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1647.276152] env[62510]: DEBUG nova.compute.provider_tree [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Updating resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 generation from 90 to 91 during operation: update_inventory {{(pid=62510) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1647.276257] env[62510]: DEBUG nova.compute.provider_tree [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1647.286836] env[62510]: DEBUG oslo_vmware.api [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Task: {'id': 
session[528e9567-5f6b-75c7-e952-406003a8e250]52296c40-8ba2-8f42-f558-2c7fc3d04afe, 'name': SearchDatastore_Task, 'duration_secs': 0.009744} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.287622] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4da0fd28-a090-4775-a4e0-f65520cf7f19 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.296451] env[62510]: DEBUG oslo_vmware.api [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Waiting for the task: (returnval){ [ 1647.296451] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52265ae6-3a9a-f903-19d7-2a046866c5e4" [ 1647.296451] env[62510]: _type = "Task" [ 1647.296451] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.307282] env[62510]: DEBUG oslo_vmware.api [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52265ae6-3a9a-f903-19d7-2a046866c5e4, 'name': SearchDatastore_Task, 'duration_secs': 0.010978} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.307528] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1647.307783] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7/4d622ed5-5f6f-46ca-bc4a-efb32f452cb7.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1647.308063] env[62510]: DEBUG oslo_concurrency.lockutils [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1647.308269] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1647.308481] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c24311c4-759b-4411-a49b-31f3b9564a29 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1647.310488] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e259d6f4-ebce-458f-ab72-29138348c8cf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.318641] env[62510]: DEBUG oslo_vmware.api [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Waiting for the task: (returnval){ [ 1647.318641] env[62510]: value = "task-1768854" [ 1647.318641] env[62510]: _type = "Task" [ 1647.318641] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.323364] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1647.323551] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1647.324599] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9934e437-a007-401e-8d01-1fdb12d07a8e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.329676] env[62510]: DEBUG oslo_vmware.api [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Task: {'id': task-1768854, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.333025] env[62510]: DEBUG oslo_vmware.api [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1647.333025] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52768524-bfab-aa5d-e5a0-610e6ef3815e" [ 1647.333025] env[62510]: _type = "Task" [ 1647.333025] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.342120] env[62510]: DEBUG oslo_vmware.api [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52768524-bfab-aa5d-e5a0-610e6ef3815e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.421601] env[62510]: DEBUG nova.compute.manager [req-0d9608ae-df8d-4d1b-854c-0a816ea8e63c req-4f7ca21d-3570-45be-8bed-04f2e2ecfa4d service nova] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Received event network-changed-1e4fadb9-6725-488d-9382-0ca6b5eb0b23 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1647.421601] env[62510]: DEBUG nova.compute.manager [req-0d9608ae-df8d-4d1b-854c-0a816ea8e63c req-4f7ca21d-3570-45be-8bed-04f2e2ecfa4d service nova] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Refreshing instance network info cache due to event network-changed-1e4fadb9-6725-488d-9382-0ca6b5eb0b23. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1647.421601] env[62510]: DEBUG oslo_concurrency.lockutils [req-0d9608ae-df8d-4d1b-854c-0a816ea8e63c req-4f7ca21d-3570-45be-8bed-04f2e2ecfa4d service nova] Acquiring lock "refresh_cache-90869287-22bd-438c-8684-56f5d43e3ca8" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1647.444240] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Releasing lock "refresh_cache-90869287-22bd-438c-8684-56f5d43e3ca8" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1647.444789] env[62510]: DEBUG nova.compute.manager [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Instance network_info: |[{"id": "1e4fadb9-6725-488d-9382-0ca6b5eb0b23", "address": "fa:16:3e:d3:6a:cc", "network": {"id": "9b209a99-520e-436f-be97-fe37ae505518", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1482163995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86abf24d608d4c438161dc0b8335dea1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e4fadb9-67", "ovs_interfaceid": "1e4fadb9-6725-488d-9382-0ca6b5eb0b23", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1647.445111] env[62510]: DEBUG oslo_concurrency.lockutils [req-0d9608ae-df8d-4d1b-854c-0a816ea8e63c req-4f7ca21d-3570-45be-8bed-04f2e2ecfa4d service nova] Acquired lock "refresh_cache-90869287-22bd-438c-8684-56f5d43e3ca8" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1647.445428] env[62510]: DEBUG nova.network.neutron [req-0d9608ae-df8d-4d1b-854c-0a816ea8e63c 
req-4f7ca21d-3570-45be-8bed-04f2e2ecfa4d service nova] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Refreshing network info cache for port 1e4fadb9-6725-488d-9382-0ca6b5eb0b23 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1647.447058] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d3:6a:cc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9630cae2-7dd9-42b7-8b53-91ab254af243', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1e4fadb9-6725-488d-9382-0ca6b5eb0b23', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1647.456035] env[62510]: DEBUG oslo.service.loopingcall [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1647.459631] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1647.460195] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8ceb91a0-780a-4876-8ba6-f0bcae5ebcc2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.485329] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1647.485329] env[62510]: value = "task-1768855" [ 1647.485329] env[62510]: _type = "Task" [ 1647.485329] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.494758] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768855, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.781911] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 5.915s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1647.782876] env[62510]: DEBUG nova.compute.manager [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1647.788722] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9e2e639c-d178-4ed9-bfc0-9d2f6bc3fb79 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.827s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1647.789077] env[62510]: DEBUG nova.objects.instance [None req-9e2e639c-d178-4ed9-bfc0-9d2f6bc3fb79 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Lazy-loading 'resources' on Instance uuid fa43a538-1aae-4642-8370-70f2a49ca92c {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1647.809651] env[62510]: DEBUG nova.network.neutron [req-0d9608ae-df8d-4d1b-854c-0a816ea8e63c req-4f7ca21d-3570-45be-8bed-04f2e2ecfa4d service nova] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Updated VIF entry in instance network info cache for port 1e4fadb9-6725-488d-9382-0ca6b5eb0b23. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1647.810366] env[62510]: DEBUG nova.network.neutron [req-0d9608ae-df8d-4d1b-854c-0a816ea8e63c req-4f7ca21d-3570-45be-8bed-04f2e2ecfa4d service nova] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Updating instance_info_cache with network_info: [{"id": "1e4fadb9-6725-488d-9382-0ca6b5eb0b23", "address": "fa:16:3e:d3:6a:cc", "network": {"id": "9b209a99-520e-436f-be97-fe37ae505518", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1482163995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86abf24d608d4c438161dc0b8335dea1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e4fadb9-67", "ovs_interfaceid": "1e4fadb9-6725-488d-9382-0ca6b5eb0b23", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1647.830045] env[62510]: DEBUG oslo_vmware.api [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Task: {'id': task-1768854, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.849691] env[62510]: DEBUG oslo_vmware.api [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52768524-bfab-aa5d-e5a0-610e6ef3815e, 'name': SearchDatastore_Task, 'duration_secs': 0.009844} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.853156] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be86f9b6-f937-4e35-8faf-f35ad57a8c85 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.857902] env[62510]: DEBUG oslo_vmware.api [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1647.857902] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]528c179d-0b4a-12fe-3bed-3430dcb9be16" [ 1647.857902] env[62510]: _type = "Task" [ 1647.857902] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.869264] env[62510]: DEBUG oslo_vmware.api [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]528c179d-0b4a-12fe-3bed-3430dcb9be16, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.995854] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768855, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.297775] env[62510]: DEBUG nova.compute.utils [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1648.300010] env[62510]: DEBUG nova.compute.manager [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1648.301026] env[62510]: DEBUG nova.network.neutron [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1648.313166] env[62510]: DEBUG oslo_concurrency.lockutils [req-0d9608ae-df8d-4d1b-854c-0a816ea8e63c req-4f7ca21d-3570-45be-8bed-04f2e2ecfa4d service nova] Releasing lock "refresh_cache-90869287-22bd-438c-8684-56f5d43e3ca8" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1648.332581] env[62510]: DEBUG oslo_vmware.api [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Task: {'id': task-1768854, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.521931} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.334922] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7/4d622ed5-5f6f-46ca-bc4a-efb32f452cb7.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1648.335104] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1648.335532] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7ab4839b-37c5-4f12-bf7e-e6d6be89cae6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.341084] env[62510]: DEBUG nova.policy [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dbc6eced57ea45fdafc3635a58fb3611', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f878b652f01c48139bfc6996e5e32f5b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1648.343923] env[62510]: DEBUG oslo_vmware.api [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Waiting for the task: (returnval){ [ 1648.343923] env[62510]: value = "task-1768856" [ 1648.343923] env[62510]: _type = "Task" [ 1648.343923] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1648.355606] env[62510]: DEBUG oslo_vmware.api [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Task: {'id': task-1768856, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.371674] env[62510]: DEBUG oslo_vmware.api [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]528c179d-0b4a-12fe-3bed-3430dcb9be16, 'name': SearchDatastore_Task, 'duration_secs': 0.009787} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.371956] env[62510]: DEBUG oslo_concurrency.lockutils [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1648.372411] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7/e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1648.372411] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cd787d9a-680d-4736-bd1e-ad5f20bb759e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.380273] env[62510]: DEBUG oslo_vmware.api [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1648.380273] env[62510]: value = "task-1768857" [ 1648.380273] env[62510]: _type = "Task" [ 1648.380273] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1648.389746] env[62510]: DEBUG oslo_vmware.api [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768857, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.496830] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768855, 'name': CreateVM_Task, 'duration_secs': 0.518943} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.499217] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1648.500095] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1648.500264] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1648.500576] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1648.500835] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75ce7984-6d85-40a7-9b2f-eae2ae480e2f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.506503] env[62510]: DEBUG oslo_vmware.api [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1648.506503] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52b0cf58-f5a6-4ef3-35e4-62319975c989" [ 1648.506503] env[62510]: _type = "Task" [ 1648.506503] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1648.515720] env[62510]: DEBUG oslo_vmware.api [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52b0cf58-f5a6-4ef3-35e4-62319975c989, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.787025] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2b35b0e-cadd-4ce5-a158-27001351e01c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.788090] env[62510]: DEBUG nova.network.neutron [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Successfully created port: 5c6ed492-d50b-40a9-933c-22b7bd04020b {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1648.798017] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3dd72ef-081d-4313-bf46-4cd9d4d7feb0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.803363] env[62510]: DEBUG nova.compute.manager [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1648.838074] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecf04557-0c57-4986-a73a-3b559fc1830b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.854460] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2731eec5-ca3c-4745-92e1-4e72993a985b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.865689] env[62510]: DEBUG oslo_vmware.api [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Task: {'id': task-1768856, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071585} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.874172] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1648.875092] env[62510]: DEBUG nova.compute.provider_tree [None req-9e2e639c-d178-4ed9-bfc0-9d2f6bc3fb79 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1648.877704] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72080dc2-1787-4e20-ae81-a9a68456211d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.902408] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7/4d622ed5-5f6f-46ca-bc4a-efb32f452cb7.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1648.903646] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7ffe2868-16ac-42ee-9a87-ca82144f3001 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.922542] env[62510]: DEBUG oslo_vmware.api [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768857, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.929853] env[62510]: DEBUG oslo_vmware.api [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Waiting for the task: (returnval){ [ 1648.929853] env[62510]: value = "task-1768858" [ 1648.929853] env[62510]: _type = "Task" [ 1648.929853] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1648.939650] env[62510]: DEBUG oslo_vmware.api [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Task: {'id': task-1768858, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.017317] env[62510]: DEBUG oslo_vmware.api [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52b0cf58-f5a6-4ef3-35e4-62319975c989, 'name': SearchDatastore_Task, 'duration_secs': 0.01315} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1649.017631] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1649.017873] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1649.018122] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1649.018269] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1649.018445] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1649.018704] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-112d0429-aeb8-4e0c-9223-e99b80461887 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.027275] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1649.027450] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1649.028185] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3db049c-a832-4145-b54d-1a32a6fd5106 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.033645] env[62510]: DEBUG oslo_vmware.api [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1649.033645] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]522261aa-e8bd-3896-3771-263dd51d68a8" [ 1649.033645] env[62510]: _type = "Task" [ 1649.033645] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.041871] env[62510]: DEBUG oslo_vmware.api [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]522261aa-e8bd-3896-3771-263dd51d68a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.381603] env[62510]: DEBUG nova.scheduler.client.report [None req-9e2e639c-d178-4ed9-bfc0-9d2f6bc3fb79 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1649.395469] env[62510]: DEBUG oslo_vmware.api [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768857, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.552785} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1649.396446] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7/e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1649.396446] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1649.396446] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-32c979ef-28cd-4ea2-a486-28db2f38d73b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.404862] env[62510]: DEBUG oslo_vmware.api [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1649.404862] env[62510]: value = "task-1768859" [ 1649.404862] env[62510]: _type = "Task" [ 1649.404862] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.413859] env[62510]: DEBUG oslo_vmware.api [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768859, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.441183] env[62510]: DEBUG oslo_vmware.api [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Task: {'id': task-1768858, 'name': ReconfigVM_Task, 'duration_secs': 0.345673} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1649.441554] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Reconfigured VM instance instance-0000003d to attach disk [datastore1] 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7/4d622ed5-5f6f-46ca-bc4a-efb32f452cb7.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1649.442251] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3653b183-cec1-43ca-9194-88dbe33fa29f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.450653] env[62510]: DEBUG oslo_vmware.api [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Waiting for the task: (returnval){ [ 1649.450653] env[62510]: value = "task-1768860" [ 1649.450653] env[62510]: _type = "Task" [ 1649.450653] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.459675] env[62510]: DEBUG oslo_vmware.api [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Task: {'id': task-1768860, 'name': Rename_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.546212] env[62510]: DEBUG oslo_vmware.api [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]522261aa-e8bd-3896-3771-263dd51d68a8, 'name': SearchDatastore_Task, 'duration_secs': 0.009897} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1649.546957] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b87ec65-09d0-4aed-9899-ea08dc7f5718 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.552678] env[62510]: DEBUG oslo_vmware.api [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1649.552678] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52dd8a65-be0e-cb27-8e21-814cecd183ba" [ 1649.552678] env[62510]: _type = "Task" [ 1649.552678] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.561509] env[62510]: DEBUG oslo_vmware.api [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52dd8a65-be0e-cb27-8e21-814cecd183ba, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.813667] env[62510]: DEBUG nova.compute.manager [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1649.838223] env[62510]: DEBUG nova.virt.hardware [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1649.838223] env[62510]: DEBUG nova.virt.hardware [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1649.838450] env[62510]: DEBUG nova.virt.hardware [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1649.838450] env[62510]: DEBUG nova.virt.hardware [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1649.838580] env[62510]: DEBUG nova.virt.hardware [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1649.838725] env[62510]: DEBUG nova.virt.hardware [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1649.838931] env[62510]: DEBUG nova.virt.hardware [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1649.839099] env[62510]: DEBUG nova.virt.hardware [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1649.839262] env[62510]: DEBUG nova.virt.hardware [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1649.839422] env[62510]: DEBUG nova.virt.hardware [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1649.840032] env[62510]: DEBUG nova.virt.hardware [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1649.840553] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0d507ee-047f-4309-8412-53407cea7242 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.849447] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8528b2d-6547-490a-827b-62c91f9cc360 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.890441] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9e2e639c-d178-4ed9-bfc0-9d2f6bc3fb79 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.102s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1649.892594] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 42.308s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1649.892803] env[62510]: DEBUG nova.objects.instance [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lazy-loading 'resources' on Instance uuid 83fa0d32-18ee-401d-af0b-a0adb538e5f4 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1649.912758] env[62510]: INFO nova.scheduler.client.report [None req-9e2e639c-d178-4ed9-bfc0-9d2f6bc3fb79 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Deleted allocations for instance fa43a538-1aae-4642-8370-70f2a49ca92c [ 1649.917753] env[62510]: DEBUG oslo_vmware.api [None req-27507b66-b661-46a7-ac54-677ca775dc8f 
tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768859, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078865} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1649.921655] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1649.921655] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-635cf033-542f-4e16-9441-f7f3e5bda36b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.946843] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Reconfiguring VM instance instance-00000015 to attach disk [datastore1] e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7/e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1649.946843] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c3c645c6-88bc-475f-afe1-a0d9b5e64227 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.969481] env[62510]: DEBUG oslo_vmware.api [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Task: {'id': task-1768860, 'name': Rename_Task, 'duration_secs': 0.165183} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1649.970795] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1649.971343] env[62510]: DEBUG oslo_vmware.api [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1649.971343] env[62510]: value = "task-1768861" [ 1649.971343] env[62510]: _type = "Task" [ 1649.971343] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.971550] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b5321022-ae3c-4882-8756-6edf1460770e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.983598] env[62510]: DEBUG oslo_vmware.api [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768861, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.984879] env[62510]: DEBUG oslo_vmware.api [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Waiting for the task: (returnval){ [ 1649.984879] env[62510]: value = "task-1768862" [ 1649.984879] env[62510]: _type = "Task" [ 1649.984879] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.993180] env[62510]: DEBUG oslo_vmware.api [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Task: {'id': task-1768862, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.064638] env[62510]: DEBUG oslo_vmware.api [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52dd8a65-be0e-cb27-8e21-814cecd183ba, 'name': SearchDatastore_Task, 'duration_secs': 0.010579} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1650.064772] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1650.065139] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 90869287-22bd-438c-8684-56f5d43e3ca8/90869287-22bd-438c-8684-56f5d43e3ca8.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1650.066756] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1a754cc8-2ea4-44b4-adb7-a6716b316c63 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.072635] env[62510]: DEBUG oslo_vmware.api [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1650.072635] env[62510]: value = "task-1768863" [ 1650.072635] env[62510]: _type = "Task" [ 1650.072635] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1650.082254] env[62510]: DEBUG oslo_vmware.api [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768863, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.395552] env[62510]: DEBUG nova.objects.instance [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lazy-loading 'numa_topology' on Instance uuid 83fa0d32-18ee-401d-af0b-a0adb538e5f4 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1650.425168] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9e2e639c-d178-4ed9-bfc0-9d2f6bc3fb79 tempest-ServersAaction247Test-1426561660 tempest-ServersAaction247Test-1426561660-project-member] Lock "fa43a538-1aae-4642-8370-70f2a49ca92c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 49.984s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1650.486617] env[62510]: DEBUG oslo_vmware.api [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768861, 'name': ReconfigVM_Task, 'duration_secs': 0.324844} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1650.489912] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Reconfigured VM instance instance-00000015 to attach disk [datastore1] e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7/e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1650.490604] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cb413195-7f0d-4d5c-88d0-ceeaa989ab8a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.498663] env[62510]: DEBUG oslo_vmware.api [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1650.498663] env[62510]: value = "task-1768864" [ 1650.498663] env[62510]: _type = "Task" [ 1650.498663] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1650.502573] env[62510]: DEBUG oslo_vmware.api [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Task: {'id': task-1768862, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.512849] env[62510]: DEBUG oslo_vmware.api [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768864, 'name': Rename_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.587216] env[62510]: DEBUG oslo_vmware.api [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768863, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.598056] env[62510]: DEBUG nova.compute.manager [req-b08f5635-40f1-4b19-9c9b-648e4fba61e5 req-da68c12d-575f-4e13-9ef5-006268bcfb5d service nova] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Received event network-vif-plugged-5c6ed492-d50b-40a9-933c-22b7bd04020b {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1650.598418] env[62510]: DEBUG oslo_concurrency.lockutils [req-b08f5635-40f1-4b19-9c9b-648e4fba61e5 req-da68c12d-575f-4e13-9ef5-006268bcfb5d service nova] Acquiring lock "0082eb97-26e9-4196-b8e3-63460d32dd19-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1650.598673] env[62510]: DEBUG oslo_concurrency.lockutils [req-b08f5635-40f1-4b19-9c9b-648e4fba61e5 req-da68c12d-575f-4e13-9ef5-006268bcfb5d service nova] Lock "0082eb97-26e9-4196-b8e3-63460d32dd19-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1650.598869] env[62510]: DEBUG oslo_concurrency.lockutils [req-b08f5635-40f1-4b19-9c9b-648e4fba61e5 req-da68c12d-575f-4e13-9ef5-006268bcfb5d service nova] Lock "0082eb97-26e9-4196-b8e3-63460d32dd19-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1650.599059] env[62510]: DEBUG nova.compute.manager [req-b08f5635-40f1-4b19-9c9b-648e4fba61e5 req-da68c12d-575f-4e13-9ef5-006268bcfb5d service nova] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] No waiting events found dispatching network-vif-plugged-5c6ed492-d50b-40a9-933c-22b7bd04020b {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1650.599233] env[62510]: WARNING nova.compute.manager [req-b08f5635-40f1-4b19-9c9b-648e4fba61e5 req-da68c12d-575f-4e13-9ef5-006268bcfb5d service nova] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Received unexpected event network-vif-plugged-5c6ed492-d50b-40a9-933c-22b7bd04020b for instance with vm_state building and task_state spawning. 
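
The repeated "Task: {'id': task-XXXXXXX, ...} progress is N%" entries above come from oslo.vmware's task polling (wait_for_task / _poll_task in oslo_vmware/api.py): the driver submits a vCenter task such as ReconfigVM_Task, PowerOnVM_Task or CopyVirtualDisk_Task and then blocks while the library polls the task state until it reports success or error. A minimal sketch of that pattern, assuming an already-created VMwareAPISession like the one the driver builds at startup (the helper name run_and_wait is illustrative, not Nova code):

def run_and_wait(session, task_ref):
    # session is an oslo_vmware.api.VMwareAPISession; wait_for_task() polls
    # the task's TaskInfo on the configured interval -- each poll is one of
    # the "_poll_task ... progress is N%" lines in the log -- and returns the
    # final TaskInfo (id, name, duration_secs) once the task succeeds, or
    # raises if vCenter reports an error.
    task_info = session.wait_for_task(task_ref)
    return task_info
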
[ 1650.737725] env[62510]: DEBUG nova.network.neutron [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Successfully updated port: 5c6ed492-d50b-40a9-933c-22b7bd04020b {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1650.898452] env[62510]: DEBUG nova.objects.base [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Object Instance<83fa0d32-18ee-401d-af0b-a0adb538e5f4> lazy-loaded attributes: resources,numa_topology {{(pid=62510) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1651.002718] env[62510]: DEBUG oslo_vmware.api [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Task: {'id': task-1768862, 'name': PowerOnVM_Task, 'duration_secs': 0.830047} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1651.007908] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1651.008797] env[62510]: DEBUG nova.compute.manager [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1651.009460] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72114839-a81f-4b16-8cdd-a32a0b962814 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.024050] env[62510]: DEBUG oslo_vmware.api [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768864, 'name': Rename_Task, 'duration_secs': 0.183902} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1651.027029] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1651.027420] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b9831c74-02d2-47cb-836f-42f01b75b17b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.036602] env[62510]: DEBUG oslo_vmware.api [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1651.036602] env[62510]: value = "task-1768865" [ 1651.036602] env[62510]: _type = "Task" [ 1651.036602] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1651.045540] env[62510]: DEBUG oslo_vmware.api [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768865, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.089925] env[62510]: DEBUG oslo_vmware.api [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768863, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.571693} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1651.089925] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 90869287-22bd-438c-8684-56f5d43e3ca8/90869287-22bd-438c-8684-56f5d43e3ca8.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1651.089925] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1651.089925] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3c3f0922-1fbf-48bc-83d4-a23561a78193 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.103099] env[62510]: DEBUG oslo_vmware.api [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1651.103099] env[62510]: value = "task-1768866" [ 1651.103099] env[62510]: _type = "Task" [ 1651.103099] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1651.121128] env[62510]: DEBUG oslo_vmware.api [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768866, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.238245] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "refresh_cache-0082eb97-26e9-4196-b8e3-63460d32dd19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1651.238447] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquired lock "refresh_cache-0082eb97-26e9-4196-b8e3-63460d32dd19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1651.238614] env[62510]: DEBUG nova.network.neutron [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1651.420032] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-013278ca-b7ad-4199-b772-12968964d82d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.428890] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56f5b925-9aa0-4cb3-8ee6-f8aad2d486d1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.461570] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d364ee0-b99d-44ff-9294-47a09a7a8e85 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.470251] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7e21efe-432a-4188-a7d6-56c762cc2a07 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.484748] env[62510]: DEBUG nova.compute.provider_tree [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1651.531658] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1651.547423] env[62510]: DEBUG oslo_vmware.api [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768865, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.611741] env[62510]: DEBUG oslo_vmware.api [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768866, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.095074} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1651.612302] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1651.612986] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-515069ac-df8a-4fa4-a665-60a0964ef49c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.636854] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] 90869287-22bd-438c-8684-56f5d43e3ca8/90869287-22bd-438c-8684-56f5d43e3ca8.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1651.637308] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9329e68b-a5f4-4158-bc0b-33200ee8746e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.658618] env[62510]: DEBUG oslo_vmware.api [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1651.658618] env[62510]: value = "task-1768867" [ 1651.658618] env[62510]: _type = "Task" [ 1651.658618] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1651.668532] env[62510]: DEBUG oslo_vmware.api [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768867, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.792956] env[62510]: DEBUG nova.network.neutron [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Instance cache missing network info. 
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1651.988530] env[62510]: DEBUG nova.scheduler.client.report [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1651.997923] env[62510]: DEBUG nova.network.neutron [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Updating instance_info_cache with network_info: [{"id": "5c6ed492-d50b-40a9-933c-22b7bd04020b", "address": "fa:16:3e:cf:a9:d9", "network": {"id": "de9186ec-ac4f-4ac0-8499-037f92e28197", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-164983974-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f878b652f01c48139bfc6996e5e32f5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "191a5351-07d5-4138-b855-206f48fc4375", "external-id": "nsx-vlan-transportzone-939", "segmentation_id": 939, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c6ed492-d5", "ovs_interfaceid": "5c6ed492-d50b-40a9-933c-22b7bd04020b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1652.046894] env[62510]: DEBUG oslo_vmware.api [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768865, 'name': PowerOnVM_Task, 'duration_secs': 0.535019} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1652.047257] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1652.047486] env[62510]: DEBUG nova.compute.manager [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1652.048293] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2c01ef4-5b0a-4131-8801-a6c2d102611a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.171214] env[62510]: DEBUG oslo_vmware.api [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768867, 'name': ReconfigVM_Task, 'duration_secs': 0.295168} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1652.171873] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Reconfigured VM instance instance-0000003e to attach disk [datastore1] 90869287-22bd-438c-8684-56f5d43e3ca8/90869287-22bd-438c-8684-56f5d43e3ca8.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1652.172273] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5463cef3-813e-4070-ba0b-3c028d65e597 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.184847] env[62510]: DEBUG oslo_vmware.api [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1652.184847] env[62510]: value = "task-1768868" [ 1652.184847] env[62510]: _type = "Task" [ 1652.184847] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1652.197726] env[62510]: DEBUG oslo_vmware.api [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768868, 'name': Rename_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.495654] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.603s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1652.499027] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 44.445s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1652.500582] env[62510]: INFO nova.compute.claims [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1652.503917] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Releasing lock "refresh_cache-0082eb97-26e9-4196-b8e3-63460d32dd19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1652.504212] env[62510]: DEBUG nova.compute.manager [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Instance network_info: |[{"id": "5c6ed492-d50b-40a9-933c-22b7bd04020b", "address": "fa:16:3e:cf:a9:d9", "network": {"id": "de9186ec-ac4f-4ac0-8499-037f92e28197", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-164983974-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f878b652f01c48139bfc6996e5e32f5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "191a5351-07d5-4138-b855-206f48fc4375", "external-id": "nsx-vlan-transportzone-939", "segmentation_id": 939, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c6ed492-d5", "ovs_interfaceid": "5c6ed492-d50b-40a9-933c-22b7bd04020b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1652.504599] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cf:a9:d9', 'network_ref': 
{'type': 'OpaqueNetwork', 'network-id': '191a5351-07d5-4138-b855-206f48fc4375', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5c6ed492-d50b-40a9-933c-22b7bd04020b', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1652.513328] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Creating folder: Project (f878b652f01c48139bfc6996e5e32f5b). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1652.513843] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2fca9916-3033-4ff2-986f-312b9b67bcb1 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Acquiring lock "4d622ed5-5f6f-46ca-bc4a-efb32f452cb7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1652.514089] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2fca9916-3033-4ff2-986f-312b9b67bcb1 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Lock "4d622ed5-5f6f-46ca-bc4a-efb32f452cb7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1652.514306] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2fca9916-3033-4ff2-986f-312b9b67bcb1 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Acquiring lock "4d622ed5-5f6f-46ca-bc4a-efb32f452cb7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1652.514629] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2fca9916-3033-4ff2-986f-312b9b67bcb1 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Lock "4d622ed5-5f6f-46ca-bc4a-efb32f452cb7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1652.514818] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2fca9916-3033-4ff2-986f-312b9b67bcb1 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Lock "4d622ed5-5f6f-46ca-bc4a-efb32f452cb7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1652.519279] env[62510]: INFO nova.compute.manager [None req-2fca9916-3033-4ff2-986f-312b9b67bcb1 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Terminating instance [ 1652.521132] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9f9f4102-4799-44a3-afb2-279f03cef606 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.549449] env[62510]: INFO nova.virt.vmwareapi.vm_util [None 
req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Created folder: Project (f878b652f01c48139bfc6996e5e32f5b) in parent group-v367197. [ 1652.549646] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Creating folder: Instances. Parent ref: group-v367361. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1652.549883] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-069998c8-b063-402a-a7e3-d27d30db1f17 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.568021] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Created folder: Instances in parent group-v367361. [ 1652.568260] env[62510]: DEBUG oslo.service.loopingcall [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1652.568825] env[62510]: DEBUG oslo_concurrency.lockutils [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1652.569054] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1652.569264] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7ab04fc0-a5aa-49ba-bfa6-94485e305252 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.593560] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1652.593560] env[62510]: value = "task-1768871" [ 1652.593560] env[62510]: _type = "Task" [ 1652.593560] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1652.601794] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768871, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.698501] env[62510]: DEBUG oslo_vmware.api [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768868, 'name': Rename_Task, 'duration_secs': 0.171337} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1652.698796] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1652.699103] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4fa74047-4d4b-4cb8-959a-fb06fee53d55 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.711280] env[62510]: DEBUG oslo_vmware.api [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1652.711280] env[62510]: value = "task-1768872" [ 1652.711280] env[62510]: _type = "Task" [ 1652.711280] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1652.712705] env[62510]: DEBUG nova.compute.manager [req-73cadb20-e3c5-4300-b4ee-c850a28be2c2 req-08d11012-e12c-4656-a44e-add23d55bb98 service nova] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Received event network-changed-5c6ed492-d50b-40a9-933c-22b7bd04020b {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1652.712900] env[62510]: DEBUG nova.compute.manager [req-73cadb20-e3c5-4300-b4ee-c850a28be2c2 req-08d11012-e12c-4656-a44e-add23d55bb98 service nova] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Refreshing instance network info cache due to event network-changed-5c6ed492-d50b-40a9-933c-22b7bd04020b. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1652.713134] env[62510]: DEBUG oslo_concurrency.lockutils [req-73cadb20-e3c5-4300-b4ee-c850a28be2c2 req-08d11012-e12c-4656-a44e-add23d55bb98 service nova] Acquiring lock "refresh_cache-0082eb97-26e9-4196-b8e3-63460d32dd19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1652.713278] env[62510]: DEBUG oslo_concurrency.lockutils [req-73cadb20-e3c5-4300-b4ee-c850a28be2c2 req-08d11012-e12c-4656-a44e-add23d55bb98 service nova] Acquired lock "refresh_cache-0082eb97-26e9-4196-b8e3-63460d32dd19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1652.713441] env[62510]: DEBUG nova.network.neutron [req-73cadb20-e3c5-4300-b4ee-c850a28be2c2 req-08d11012-e12c-4656-a44e-add23d55bb98 service nova] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Refreshing network info cache for port 5c6ed492-d50b-40a9-933c-22b7bd04020b {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1652.726920] env[62510]: DEBUG oslo_vmware.api [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768872, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.003818] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1367244e-a3a2-45ac-ad10-4a4aafeef9dd tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lock "83fa0d32-18ee-401d-af0b-a0adb538e5f4" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 66.044s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1653.015026] env[62510]: DEBUG oslo_concurrency.lockutils [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lock "83fa0d32-18ee-401d-af0b-a0adb538e5f4" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 41.328s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1653.015026] env[62510]: INFO nova.compute.manager [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Unshelving [ 1653.021650] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2fca9916-3033-4ff2-986f-312b9b67bcb1 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Acquiring lock "refresh_cache-4d622ed5-5f6f-46ca-bc4a-efb32f452cb7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1653.022599] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2fca9916-3033-4ff2-986f-312b9b67bcb1 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Acquired lock "refresh_cache-4d622ed5-5f6f-46ca-bc4a-efb32f452cb7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1653.023497] env[62510]: DEBUG nova.network.neutron [None req-2fca9916-3033-4ff2-986f-312b9b67bcb1 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1653.104843] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768871, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.230852] env[62510]: DEBUG oslo_vmware.api [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768872, 'name': PowerOnVM_Task, 'duration_secs': 0.474744} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1653.230852] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1653.230989] env[62510]: INFO nova.compute.manager [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Took 9.34 seconds to spawn the instance on the hypervisor. [ 1653.231205] env[62510]: DEBUG nova.compute.manager [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1653.232105] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2076de43-0604-4607-b85f-dbd7e09543d6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.552098] env[62510]: DEBUG nova.network.neutron [None req-2fca9916-3033-4ff2-986f-312b9b67bcb1 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1653.575771] env[62510]: DEBUG nova.network.neutron [req-73cadb20-e3c5-4300-b4ee-c850a28be2c2 req-08d11012-e12c-4656-a44e-add23d55bb98 service nova] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Updated VIF entry in instance network info cache for port 5c6ed492-d50b-40a9-933c-22b7bd04020b. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1653.576406] env[62510]: DEBUG nova.network.neutron [req-73cadb20-e3c5-4300-b4ee-c850a28be2c2 req-08d11012-e12c-4656-a44e-add23d55bb98 service nova] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Updating instance_info_cache with network_info: [{"id": "5c6ed492-d50b-40a9-933c-22b7bd04020b", "address": "fa:16:3e:cf:a9:d9", "network": {"id": "de9186ec-ac4f-4ac0-8499-037f92e28197", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-164983974-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f878b652f01c48139bfc6996e5e32f5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "191a5351-07d5-4138-b855-206f48fc4375", "external-id": "nsx-vlan-transportzone-939", "segmentation_id": 939, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c6ed492-d5", "ovs_interfaceid": "5c6ed492-d50b-40a9-933c-22b7bd04020b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1653.605285] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768871, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.618287] env[62510]: DEBUG nova.network.neutron [None req-2fca9916-3033-4ff2-986f-312b9b67bcb1 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1653.755526] env[62510]: INFO nova.compute.manager [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Took 51.69 seconds to build instance. 
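
The network_info blob logged above for port 5c6ed492-d50b-40a9-933c-22b7bd04020b is what the vmops layer condenses into the "Instance VIF info" dict shown earlier (network_name 'br-int', the port's MAC address, an OpaqueNetwork network_ref built from the nsx-logical-switch-id, the port UUID as iface_id, and vif_model 'vmxnet3'). The helper below is not Nova's code; it is an illustrative restatement of that field mapping using only values taken from the log:

def vif_info_from_network_info(vif):
    # Map one Neutron VIF entry (as cached in instance_info_cache) onto the
    # structure build_virtual_machine() logs just before CreateVM_Task.
    return {
        'network_name': vif['network']['bridge'],                  # 'br-int'
        'mac_address': vif['address'],                             # 'fa:16:3e:cf:a9:d9'
        'network_ref': {'type': 'OpaqueNetwork',
                        'network-id': vif['details']['nsx-logical-switch-id'],
                        'network-type': 'nsx.LogicalSwitch',
                        'use-external-id': True},
        'iface_id': vif['id'],                                     # Neutron port UUID
        'vif_model': 'vmxnet3',                                    # per the logged image name
    }

port = {'id': '5c6ed492-d50b-40a9-933c-22b7bd04020b',
        'address': 'fa:16:3e:cf:a9:d9',
        'network': {'bridge': 'br-int'},
        'details': {'nsx-logical-switch-id': '191a5351-07d5-4138-b855-206f48fc4375'}}
print(vif_info_from_network_info(port))
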
[ 1653.871432] env[62510]: DEBUG oslo_concurrency.lockutils [None req-74f9e27a-caad-46f9-b6d0-6aa789f4e49f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquiring lock "0d27da5c-20f3-4df1-86d2-036c904fd657" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1653.871432] env[62510]: DEBUG oslo_concurrency.lockutils [None req-74f9e27a-caad-46f9-b6d0-6aa789f4e49f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "0d27da5c-20f3-4df1-86d2-036c904fd657" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1653.871432] env[62510]: DEBUG oslo_concurrency.lockutils [None req-74f9e27a-caad-46f9-b6d0-6aa789f4e49f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquiring lock "0d27da5c-20f3-4df1-86d2-036c904fd657-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1653.871432] env[62510]: DEBUG oslo_concurrency.lockutils [None req-74f9e27a-caad-46f9-b6d0-6aa789f4e49f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "0d27da5c-20f3-4df1-86d2-036c904fd657-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1653.871432] env[62510]: DEBUG oslo_concurrency.lockutils [None req-74f9e27a-caad-46f9-b6d0-6aa789f4e49f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "0d27da5c-20f3-4df1-86d2-036c904fd657-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1653.872638] env[62510]: INFO nova.compute.manager [None req-74f9e27a-caad-46f9-b6d0-6aa789f4e49f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Terminating instance [ 1654.037241] env[62510]: DEBUG nova.compute.utils [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1654.078770] env[62510]: DEBUG oslo_concurrency.lockutils [req-73cadb20-e3c5-4300-b4ee-c850a28be2c2 req-08d11012-e12c-4656-a44e-add23d55bb98 service nova] Releasing lock "refresh_cache-0082eb97-26e9-4196-b8e3-63460d32dd19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1654.095310] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee4eea92-5f44-4ac9-b9d5-56f78aea9ada {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.106905] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': 
task-1768871, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.109729] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2092c477-9ab4-4b95-9379-079ca2fa0e41 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.147886] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2fca9916-3033-4ff2-986f-312b9b67bcb1 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Releasing lock "refresh_cache-4d622ed5-5f6f-46ca-bc4a-efb32f452cb7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1654.148341] env[62510]: DEBUG nova.compute.manager [None req-2fca9916-3033-4ff2-986f-312b9b67bcb1 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1654.148538] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2fca9916-3033-4ff2-986f-312b9b67bcb1 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1654.149774] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be6b6ebe-5e0e-4688-b675-63756b8e5e0e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.152974] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9e8d1e6-61db-431f-9122-ea996eae5a82 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.162633] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fca9916-3033-4ff2-986f-312b9b67bcb1 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1654.164614] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-728471ff-b902-4dec-ad08-b6a6c6a3907b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.167287] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55ee3ea3-b50a-4fe5-bd88-11ea42cf6d74 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.186193] env[62510]: DEBUG nova.compute.provider_tree [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1654.186828] env[62510]: DEBUG oslo_vmware.api [None req-2fca9916-3033-4ff2-986f-312b9b67bcb1 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Waiting for the task: (returnval){ [ 
1654.186828] env[62510]: value = "task-1768873" [ 1654.186828] env[62510]: _type = "Task" [ 1654.186828] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1654.197264] env[62510]: DEBUG oslo_vmware.api [None req-2fca9916-3033-4ff2-986f-312b9b67bcb1 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Task: {'id': task-1768873, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.258568] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4f908963-9d48-4865-850d-ef88b60e7d7a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "90869287-22bd-438c-8684-56f5d43e3ca8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.212s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1654.377013] env[62510]: DEBUG nova.compute.manager [None req-74f9e27a-caad-46f9-b6d0-6aa789f4e49f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1654.377399] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-74f9e27a-caad-46f9-b6d0-6aa789f4e49f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1654.379242] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb53c299-4a4c-4670-ac05-32a64a4642f2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.389954] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-74f9e27a-caad-46f9-b6d0-6aa789f4e49f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1654.390218] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-370b556a-3b2c-4f06-acfa-ed62cf635f2a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.398588] env[62510]: DEBUG oslo_vmware.api [None req-74f9e27a-caad-46f9-b6d0-6aa789f4e49f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1654.398588] env[62510]: value = "task-1768874" [ 1654.398588] env[62510]: _type = "Task" [ 1654.398588] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1654.408371] env[62510]: DEBUG oslo_vmware.api [None req-74f9e27a-caad-46f9-b6d0-6aa789f4e49f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768874, 'name': PowerOffVM_Task} progress is 0%. 
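The repeated "Acquiring lock" / "acquired ... waited N s" / "released ... held N s" records above (for example the per-instance lock released after being held 53.212s around _locked_do_build_and_run_instance, and the terminate/clear-events locks before it) are emitted by oslo.concurrency's lockutils helpers. A minimal illustrative sketch of that pattern follows; the lock name reuses an instance UUID from the log, but the guarded bodies are placeholders, not Nova code.

    # Illustrative sketch only, not Nova code: the guarded bodies are empty
    # placeholders. oslo.concurrency produces the acquire/held/released debug
    # lines seen above when code is wrapped this way.
    from oslo_concurrency import lockutils

    INSTANCE_UUID = "0d27da5c-20f3-4df1-86d2-036c904fd657"  # taken from the log

    @lockutils.synchronized(INSTANCE_UUID)
    def do_terminate_instance():
        # Runs under the named lock; a concurrent caller blocks here and its
        # wait time shows up as the "waited N.NNNs" part of the log line.
        pass

    def with_context_manager():
        # Equivalent form; this emits the "Acquiring lock" / "Releasing lock"
        # pairs seen around the refresh_cache-* lock names above.
        with lockutils.lock(INSTANCE_UUID):
            pass

    do_terminate_instance()
    with_context_manager()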
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.543122] env[62510]: INFO nova.virt.block_device [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Booting with volume fc768519-1bbc-47ae-b9f1-9717554b1759 at /dev/sdb [ 1654.588724] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7811f359-9951-493a-94b1-dc09d7c6908c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.607631] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9901fefd-af73-4aaa-9579-8fba34c8c28f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.622522] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768871, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.650191] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e5aff086-312b-464e-9c77-062cbe0a17ea {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.659689] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8556e725-bf95-4788-9668-9fade2488d68 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.697155] env[62510]: DEBUG nova.scheduler.client.report [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1654.704148] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd0a584d-3a86-4b12-8647-1dccc0ceb8ad {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.712801] env[62510]: DEBUG oslo_vmware.api [None req-2fca9916-3033-4ff2-986f-312b9b67bcb1 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Task: {'id': task-1768873, 'name': PowerOffVM_Task, 'duration_secs': 0.152288} completed successfully. 
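The "Waiting for the task: (returnval){ ... }" blocks and the periodic "progress is N%" entries (task-1768871 sits at 99% for several seconds before completing) come from oslo.vmware's task polling. A short sketch of that pattern, assuming oslo.vmware is available; the vCenter host, credentials and vm_ref are placeholders, not values from this deployment.

    # Sketch of the invoke-then-poll pattern visible in the log.
    from oslo_vmware import api, vim_util

    def build_session():
        # Placeholder connection details; real values come from configuration.
        return api.VMwareAPISession("vcenter.example.org", "user", "secret",
                                    api_retry_count=10, task_poll_interval=0.5)

    def power_off(session, vm_ref):
        # Property read, mirroring the "Checking state" /
        # RetrievePropertiesEx calls above.
        state = session.invoke_api(vim_util, "get_object_property",
                                   session.vim, vm_ref, "runtime.powerState")
        if str(state) == "poweredOn":
            # The call returns a Task moref; wait_for_task() polls it,
            # producing the "Waiting for the task" and "progress is N%" lines.
            task = session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref)
            session.wait_for_task(task)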
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1654.714927] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fca9916-3033-4ff2-986f-312b9b67bcb1 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1654.715315] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2fca9916-3033-4ff2-986f-312b9b67bcb1 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1654.715720] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-54dc4231-2ee2-4f0d-a304-f71fa127eb49 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.717540] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28007e57-dfb2-4b28-8740-9a2f42b2292e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.737251] env[62510]: DEBUG nova.virt.block_device [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Updating existing volume attachment record: 3c589144-ffe8-414f-a650-9a1cbb18c4ef {{(pid=62510) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1654.801296] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2fca9916-3033-4ff2-986f-312b9b67bcb1 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1654.801742] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2fca9916-3033-4ff2-986f-312b9b67bcb1 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1654.801742] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fca9916-3033-4ff2-986f-312b9b67bcb1 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Deleting the datastore file [datastore1] 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1654.801945] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2c8c2723-307c-485a-8d5e-d073e000c180 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.810803] env[62510]: DEBUG oslo_vmware.api [None req-2fca9916-3033-4ff2-986f-312b9b67bcb1 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Waiting for the task: (returnval){ [ 1654.810803] env[62510]: value = "task-1768876" [ 1654.810803] env[62510]: _type = "Task" [ 1654.810803] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1654.819523] env[62510]: DEBUG oslo_vmware.api [None req-2fca9916-3033-4ff2-986f-312b9b67bcb1 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Task: {'id': task-1768876, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.913247] env[62510]: DEBUG oslo_vmware.api [None req-74f9e27a-caad-46f9-b6d0-6aa789f4e49f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768874, 'name': PowerOffVM_Task, 'duration_secs': 0.223074} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1654.913630] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-74f9e27a-caad-46f9-b6d0-6aa789f4e49f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1654.913907] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-74f9e27a-caad-46f9-b6d0-6aa789f4e49f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1654.915037] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f69ccba6-d7df-4a6b-b466-e0735b5da4fe {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.950282] env[62510]: DEBUG oslo_concurrency.lockutils [None req-82e43bba-8851-461c-8285-c43ae5a6cbd4 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "90869287-22bd-438c-8684-56f5d43e3ca8" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1654.950462] env[62510]: DEBUG oslo_concurrency.lockutils [None req-82e43bba-8851-461c-8285-c43ae5a6cbd4 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "90869287-22bd-438c-8684-56f5d43e3ca8" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1654.950649] env[62510]: DEBUG nova.compute.manager [None req-82e43bba-8851-461c-8285-c43ae5a6cbd4 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1654.951589] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbbb5d95-07f8-4822-905b-45283cb94d81 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.958991] env[62510]: DEBUG nova.compute.manager [None req-82e43bba-8851-461c-8285-c43ae5a6cbd4 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] 
[instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62510) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1654.959768] env[62510]: DEBUG nova.objects.instance [None req-82e43bba-8851-461c-8285-c43ae5a6cbd4 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lazy-loading 'flavor' on Instance uuid 90869287-22bd-438c-8684-56f5d43e3ca8 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1655.110445] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768871, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1655.203416] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.703s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1655.203416] env[62510]: DEBUG nova.compute.manager [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1655.207924] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ec5dec1a-c502-43da-b7af-111abc37dd6c tempest-ServersListShow296Test-310805575 tempest-ServersListShow296Test-310805575-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 43.853s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1655.209412] env[62510]: INFO nova.compute.claims [None req-ec5dec1a-c502-43da-b7af-111abc37dd6c tempest-ServersListShow296Test-310805575 tempest-ServersListShow296Test-310805575-project-member] [instance: 1f0ab639-bfcb-48eb-a079-ea07dd627c2f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1655.324201] env[62510]: DEBUG oslo_vmware.api [None req-2fca9916-3033-4ff2-986f-312b9b67bcb1 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Task: {'id': task-1768876, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.383491} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1655.324514] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fca9916-3033-4ff2-986f-312b9b67bcb1 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1655.324703] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2fca9916-3033-4ff2-986f-312b9b67bcb1 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1655.324875] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2fca9916-3033-4ff2-986f-312b9b67bcb1 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1655.325069] env[62510]: INFO nova.compute.manager [None req-2fca9916-3033-4ff2-986f-312b9b67bcb1 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1655.325323] env[62510]: DEBUG oslo.service.loopingcall [None req-2fca9916-3033-4ff2-986f-312b9b67bcb1 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1655.325513] env[62510]: DEBUG nova.compute.manager [-] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1655.325609] env[62510]: DEBUG nova.network.neutron [-] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1655.342979] env[62510]: DEBUG nova.network.neutron [-] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1655.614061] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768871, 'name': CreateVM_Task} progress is 99%. 
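The destroy of instance 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7 above follows the driver's usual order: power the VM off, unregister it from the vCenter inventory, then delete its directory from the datastore before network deallocation starts. A condensed sketch of those three vSphere calls, assuming an oslo.vmware session, a vm_ref for the VM and a dc_ref for its datacenter (all placeholders):

    # Condensed sketch of the power-off / unregister / delete-files order.
    def destroy_vm(session, vm_ref, dc_ref,
                   ds_path="[datastore1] 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7"):
        # 1. PowerOffVM_Task (task-1768873 in the log).
        session.wait_for_task(
            session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref))
        # 2. UnregisterVM removes the VM from the inventory; it is a plain
        #    method call, not a task.
        session.invoke_api(session.vim, "UnregisterVM", vm_ref)
        # 3. DeleteDatastoreFile_Task (task-1768876 in the log) removes the
        #    instance directory from the datastore.
        file_mgr = session.vim.service_content.fileManager
        session.wait_for_task(
            session.invoke_api(session.vim, "DeleteDatastoreFile_Task",
                               file_mgr, name=ds_path, datacenter=dc_ref))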
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1655.715514] env[62510]: DEBUG nova.compute.utils [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1655.717384] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ec5dec1a-c502-43da-b7af-111abc37dd6c tempest-ServersListShow296Test-310805575 tempest-ServersListShow296Test-310805575-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.509s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1655.722275] env[62510]: DEBUG nova.compute.utils [None req-ec5dec1a-c502-43da-b7af-111abc37dd6c tempest-ServersListShow296Test-310805575 tempest-ServersListShow296Test-310805575-project-member] [instance: 1f0ab639-bfcb-48eb-a079-ea07dd627c2f] Instance 1f0ab639-bfcb-48eb-a079-ea07dd627c2f could not be found. {{(pid=62510) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1655.722275] env[62510]: DEBUG nova.compute.manager [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1655.722275] env[62510]: DEBUG nova.network.neutron [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1655.722275] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24bc14d2-8c6b-47e6-9f89-f6569b6cd4a9 tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.269s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1655.722275] env[62510]: DEBUG nova.objects.instance [None req-24bc14d2-8c6b-47e6-9f89-f6569b6cd4a9 tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Lazy-loading 'resources' on Instance uuid 641628d1-bb6d-4207-89b9-98014328e028 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1655.727837] env[62510]: DEBUG nova.compute.manager [None req-ec5dec1a-c502-43da-b7af-111abc37dd6c tempest-ServersListShow296Test-310805575 tempest-ServersListShow296Test-310805575-project-member] [instance: 1f0ab639-bfcb-48eb-a079-ea07dd627c2f] Instance disappeared during build. 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2520}} [ 1655.727837] env[62510]: DEBUG nova.compute.manager [None req-ec5dec1a-c502-43da-b7af-111abc37dd6c tempest-ServersListShow296Test-310805575 tempest-ServersListShow296Test-310805575-project-member] [instance: 1f0ab639-bfcb-48eb-a079-ea07dd627c2f] Unplugging VIFs for instance {{(pid=62510) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 1655.728033] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ec5dec1a-c502-43da-b7af-111abc37dd6c tempest-ServersListShow296Test-310805575 tempest-ServersListShow296Test-310805575-project-member] Acquiring lock "refresh_cache-1f0ab639-bfcb-48eb-a079-ea07dd627c2f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1655.728184] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ec5dec1a-c502-43da-b7af-111abc37dd6c tempest-ServersListShow296Test-310805575 tempest-ServersListShow296Test-310805575-project-member] Acquired lock "refresh_cache-1f0ab639-bfcb-48eb-a079-ea07dd627c2f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1655.728372] env[62510]: DEBUG nova.network.neutron [None req-ec5dec1a-c502-43da-b7af-111abc37dd6c tempest-ServersListShow296Test-310805575 tempest-ServersListShow296Test-310805575-project-member] [instance: 1f0ab639-bfcb-48eb-a079-ea07dd627c2f] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1655.770493] env[62510]: DEBUG nova.policy [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '444e79fb7f11488b8148a7329bbc6823', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5ca23491c4194bee84d0e9be0b015342', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1655.845818] env[62510]: DEBUG nova.network.neutron [-] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1655.971033] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-82e43bba-8851-461c-8285-c43ae5a6cbd4 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1655.971147] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7e318e96-7a94-4fcb-8c6a-87437dbf6e39 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.985111] env[62510]: DEBUG oslo_vmware.api [None req-82e43bba-8851-461c-8285-c43ae5a6cbd4 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1655.985111] env[62510]: value = "task-1768881" [ 1655.985111] env[62510]: _type = "Task" [ 1655.985111] 
env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1655.995400] env[62510]: DEBUG oslo_vmware.api [None req-82e43bba-8851-461c-8285-c43ae5a6cbd4 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768881, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1656.101023] env[62510]: DEBUG nova.network.neutron [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Successfully created port: 8d06415c-4a6c-4092-aed2-02f3e574052b {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1656.112804] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768871, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1656.222405] env[62510]: DEBUG nova.compute.manager [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1656.226033] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Acquiring lock "1d644c4f-1fd4-4251-aeef-5777d3f4b94c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1656.226033] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Lock "1d644c4f-1fd4-4251-aeef-5777d3f4b94c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1656.233074] env[62510]: DEBUG nova.compute.utils [None req-ec5dec1a-c502-43da-b7af-111abc37dd6c tempest-ServersListShow296Test-310805575 tempest-ServersListShow296Test-310805575-project-member] [instance: 1f0ab639-bfcb-48eb-a079-ea07dd627c2f] Can not refresh info_cache because instance was not found {{(pid=62510) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1024}} [ 1656.259542] env[62510]: DEBUG nova.network.neutron [None req-ec5dec1a-c502-43da-b7af-111abc37dd6c tempest-ServersListShow296Test-310805575 tempest-ServersListShow296Test-310805575-project-member] [instance: 1f0ab639-bfcb-48eb-a079-ea07dd627c2f] Instance cache missing network info. 
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1656.325109] env[62510]: DEBUG nova.network.neutron [None req-ec5dec1a-c502-43da-b7af-111abc37dd6c tempest-ServersListShow296Test-310805575 tempest-ServersListShow296Test-310805575-project-member] [instance: 1f0ab639-bfcb-48eb-a079-ea07dd627c2f] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1656.349156] env[62510]: INFO nova.compute.manager [-] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Took 1.02 seconds to deallocate network for instance. [ 1656.499199] env[62510]: DEBUG oslo_vmware.api [None req-82e43bba-8851-461c-8285-c43ae5a6cbd4 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768881, 'name': PowerOffVM_Task, 'duration_secs': 0.211452} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1656.499469] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-82e43bba-8851-461c-8285-c43ae5a6cbd4 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1656.499671] env[62510]: DEBUG nova.compute.manager [None req-82e43bba-8851-461c-8285-c43ae5a6cbd4 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1656.500507] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef38ee49-7084-42a9-a93d-0cbf50503509 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.613796] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768871, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1656.717825] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9be65a6f-0a48-4760-9fad-a7c89b9d29c6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.727242] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36930f3b-3ea0-4aaa-99c8-2500be86da7c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.735170] env[62510]: DEBUG nova.compute.manager [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1656.769678] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db4af31c-23eb-4544-b179-a1de876f921c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.778882] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-494fb1ed-e98b-4ebc-bd78-e1ba9618065f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.794048] env[62510]: DEBUG nova.compute.provider_tree [None req-24bc14d2-8c6b-47e6-9f89-f6569b6cd4a9 tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1656.799677] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-74f9e27a-caad-46f9-b6d0-6aa789f4e49f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1656.799992] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-74f9e27a-caad-46f9-b6d0-6aa789f4e49f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1656.800287] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-74f9e27a-caad-46f9-b6d0-6aa789f4e49f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Deleting the datastore file [datastore1] 0d27da5c-20f3-4df1-86d2-036c904fd657 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1656.800602] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3a9b0349-1d06-4434-9496-ea0838b41cd8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.809817] env[62510]: DEBUG oslo_vmware.api [None req-74f9e27a-caad-46f9-b6d0-6aa789f4e49f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1656.809817] env[62510]: value = "task-1768882" [ 1656.809817] env[62510]: _type = "Task" [ 1656.809817] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1656.818228] env[62510]: DEBUG oslo_vmware.api [None req-74f9e27a-caad-46f9-b6d0-6aa789f4e49f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768882, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1656.828069] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ec5dec1a-c502-43da-b7af-111abc37dd6c tempest-ServersListShow296Test-310805575 tempest-ServersListShow296Test-310805575-project-member] Releasing lock "refresh_cache-1f0ab639-bfcb-48eb-a079-ea07dd627c2f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1656.828297] env[62510]: DEBUG nova.compute.manager [None req-ec5dec1a-c502-43da-b7af-111abc37dd6c tempest-ServersListShow296Test-310805575 tempest-ServersListShow296Test-310805575-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62510) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 1656.828486] env[62510]: DEBUG nova.compute.manager [None req-ec5dec1a-c502-43da-b7af-111abc37dd6c tempest-ServersListShow296Test-310805575 tempest-ServersListShow296Test-310805575-project-member] [instance: 1f0ab639-bfcb-48eb-a079-ea07dd627c2f] Skipping network deallocation for instance since networking was not requested. {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 1656.858145] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2fca9916-3033-4ff2-986f-312b9b67bcb1 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1657.015548] env[62510]: DEBUG oslo_concurrency.lockutils [None req-82e43bba-8851-461c-8285-c43ae5a6cbd4 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "90869287-22bd-438c-8684-56f5d43e3ca8" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.065s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1657.114159] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768871, 'name': CreateVM_Task, 'duration_secs': 4.21756} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1657.114405] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1657.115100] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1657.115261] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1657.115571] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1657.115827] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-794aa418-56f0-4571-b581-7a3a80633376 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.120863] env[62510]: DEBUG oslo_vmware.api [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1657.120863] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52aaa626-b925-3457-3bec-0e113835dbc0" [ 1657.120863] env[62510]: _type = "Task" [ 1657.120863] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1657.130651] env[62510]: DEBUG oslo_vmware.api [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52aaa626-b925-3457-3bec-0e113835dbc0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.240244] env[62510]: DEBUG nova.compute.manager [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1657.258645] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1657.267045] env[62510]: DEBUG nova.virt.hardware [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1657.267045] env[62510]: DEBUG nova.virt.hardware [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1657.267045] env[62510]: DEBUG nova.virt.hardware [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1657.267045] env[62510]: DEBUG nova.virt.hardware [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1657.267045] env[62510]: DEBUG nova.virt.hardware [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1657.267045] env[62510]: DEBUG nova.virt.hardware [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1657.267045] env[62510]: DEBUG nova.virt.hardware [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1657.267045] env[62510]: DEBUG nova.virt.hardware [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1657.267045] env[62510]: DEBUG nova.virt.hardware [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1657.267464] env[62510]: DEBUG nova.virt.hardware [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1657.267464] env[62510]: DEBUG nova.virt.hardware [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1657.268332] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a564644-3542-4bc6-88c0-834c47f49966 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.277484] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3664e098-e6f6-43e5-a2de-fcf2060fa873 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.314747] env[62510]: ERROR nova.scheduler.client.report [None req-24bc14d2-8c6b-47e6-9f89-f6569b6cd4a9 tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] [req-e3e2171e-3cbb-4e99-8eff-c08434449d25] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c3653102-341b-4ed1-8b1f-1abaf8aa3e56. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-e3e2171e-3cbb-4e99-8eff-c08434449d25"}]} [ 1657.323725] env[62510]: DEBUG oslo_vmware.api [None req-74f9e27a-caad-46f9-b6d0-6aa789f4e49f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768882, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.177099} completed successfully. 
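The 409 above is placement's generation check in action: the inventory PUT carried a stale resource_provider_generation, placement rejects it with code placement.concurrent_update, and the report client refreshes the provider data and retries (the "Refreshing inventories" records that follow). A bare-bones sketch of that interaction against the placement REST API using requests; the endpoint, token and microversion value are placeholders.

    # Sketch of the update-inventory / refresh-on-conflict loop implied above.
    import requests

    PLACEMENT = "http://placement.example.org/placement"   # placeholder
    HEADERS = {"X-Auth-Token": "ADMIN_TOKEN",               # placeholder
               "OpenStack-API-Version": "placement 1.26"}

    def put_inventories(rp_uuid, inventories, retries=3):
        for _ in range(retries):
            # Read the provider's current generation first.
            rp = requests.get(f"{PLACEMENT}/resource_providers/{rp_uuid}",
                              headers=HEADERS).json()
            body = {"resource_provider_generation": rp["generation"],
                    "inventories": inventories}
            resp = requests.put(
                f"{PLACEMENT}/resource_providers/{rp_uuid}/inventories",
                json=body, headers=HEADERS)
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()
            # 409 placement.concurrent_update: another writer bumped the
            # generation; loop, re-read it and try the PUT again.
        raise RuntimeError("gave up after repeated generation conflicts")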
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1657.323958] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-74f9e27a-caad-46f9-b6d0-6aa789f4e49f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1657.324182] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-74f9e27a-caad-46f9-b6d0-6aa789f4e49f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1657.324371] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-74f9e27a-caad-46f9-b6d0-6aa789f4e49f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1657.324678] env[62510]: INFO nova.compute.manager [None req-74f9e27a-caad-46f9-b6d0-6aa789f4e49f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Took 2.95 seconds to destroy the instance on the hypervisor. [ 1657.324804] env[62510]: DEBUG oslo.service.loopingcall [None req-74f9e27a-caad-46f9-b6d0-6aa789f4e49f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1657.324989] env[62510]: DEBUG nova.compute.manager [-] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1657.325113] env[62510]: DEBUG nova.network.neutron [-] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1657.331430] env[62510]: DEBUG nova.scheduler.client.report [None req-24bc14d2-8c6b-47e6-9f89-f6569b6cd4a9 tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Refreshing inventories for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1657.354803] env[62510]: DEBUG nova.scheduler.client.report [None req-24bc14d2-8c6b-47e6-9f89-f6569b6cd4a9 tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Updating ProviderTree inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1657.354803] env[62510]: DEBUG 
nova.compute.provider_tree [None req-24bc14d2-8c6b-47e6-9f89-f6569b6cd4a9 tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1657.372766] env[62510]: DEBUG nova.scheduler.client.report [None req-24bc14d2-8c6b-47e6-9f89-f6569b6cd4a9 tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Refreshing aggregate associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, aggregates: None {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1657.400322] env[62510]: DEBUG nova.scheduler.client.report [None req-24bc14d2-8c6b-47e6-9f89-f6569b6cd4a9 tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Refreshing trait associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1657.470221] env[62510]: INFO nova.compute.manager [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Rebuilding instance [ 1657.514393] env[62510]: DEBUG nova.compute.manager [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1657.515039] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b967e045-36fe-468b-a954-ba8126e28d24 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.547586] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1657.547811] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1657.604231] env[62510]: DEBUG nova.compute.manager [req-d1ea7f22-b884-4a95-8493-3896a7186088 req-a9f82557-662d-415e-a267-df73929e0f7d service nova] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Received event network-vif-plugged-8d06415c-4a6c-4092-aed2-02f3e574052b {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1657.604485] env[62510]: 
DEBUG oslo_concurrency.lockutils [req-d1ea7f22-b884-4a95-8493-3896a7186088 req-a9f82557-662d-415e-a267-df73929e0f7d service nova] Acquiring lock "d1c20183-ba24-4a11-ad82-bf240d581322-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1657.604691] env[62510]: DEBUG oslo_concurrency.lockutils [req-d1ea7f22-b884-4a95-8493-3896a7186088 req-a9f82557-662d-415e-a267-df73929e0f7d service nova] Lock "d1c20183-ba24-4a11-ad82-bf240d581322-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1657.604871] env[62510]: DEBUG oslo_concurrency.lockutils [req-d1ea7f22-b884-4a95-8493-3896a7186088 req-a9f82557-662d-415e-a267-df73929e0f7d service nova] Lock "d1c20183-ba24-4a11-ad82-bf240d581322-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1657.605054] env[62510]: DEBUG nova.compute.manager [req-d1ea7f22-b884-4a95-8493-3896a7186088 req-a9f82557-662d-415e-a267-df73929e0f7d service nova] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] No waiting events found dispatching network-vif-plugged-8d06415c-4a6c-4092-aed2-02f3e574052b {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1657.605224] env[62510]: WARNING nova.compute.manager [req-d1ea7f22-b884-4a95-8493-3896a7186088 req-a9f82557-662d-415e-a267-df73929e0f7d service nova] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Received unexpected event network-vif-plugged-8d06415c-4a6c-4092-aed2-02f3e574052b for instance with vm_state building and task_state spawning. [ 1657.634984] env[62510]: DEBUG oslo_vmware.api [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52aaa626-b925-3457-3bec-0e113835dbc0, 'name': SearchDatastore_Task, 'duration_secs': 0.010374} completed successfully. 
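The network-vif-plugged handling above shows the compute manager's event-waiter pattern: Neutron reports the port as active, the manager looks for a registered waiter for that (instance, event) pair under the per-instance "-events" lock, and since the spawn has not started waiting yet it logs the "Received unexpected event" warning instead of dispatching. A stripped-down illustration of that registry follows; it mirrors the behaviour visible in the log but is not Nova's actual classes.

    # Toy waiter registry keyed by (instance_uuid, event_name); illustrative only.
    import threading
    from collections import defaultdict

    class EventWaiters:
        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = defaultdict(dict)   # instance_uuid -> {event: Event}

        def prepare(self, instance_uuid, event_name):
            # Called by the code that will later block waiting for the event.
            ev = threading.Event()
            with self._lock:
                self._waiters[instance_uuid][event_name] = ev
            return ev

        def dispatch(self, instance_uuid, event_name):
            # Called when the external event arrives from Neutron.
            with self._lock:
                ev = self._waiters[instance_uuid].pop(event_name, None)
            if ev is None:
                print(f"Received unexpected event {event_name} "
                      f"for instance {instance_uuid}")   # the WARNING above
            else:
                ev.set()                                  # wake the waiter

    waiters = EventWaiters()
    waiters.dispatch("d1c20183-ba24-4a11-ad82-bf240d581322",
                     "network-vif-plugged-8d06415c-4a6c-4092-aed2-02f3e574052b")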
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1657.635306] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1657.635543] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1657.635769] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1657.635967] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1657.636230] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1657.636458] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c4bd237e-e2f1-4567-8f5d-d3e03ee846c0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.648859] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1657.649377] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1657.652083] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92496e14-9712-4c91-a840-3fbf320f05f0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.658108] env[62510]: DEBUG nova.network.neutron [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Successfully updated port: 8d06415c-4a6c-4092-aed2-02f3e574052b {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1657.658655] env[62510]: DEBUG oslo_vmware.api [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1657.658655] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52bcc54d-8c20-8ed8-be27-b32b60991ae1" [ 1657.658655] env[62510]: _type = "Task" [ 1657.658655] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1657.678396] env[62510]: DEBUG oslo_vmware.api [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52bcc54d-8c20-8ed8-be27-b32b60991ae1, 'name': SearchDatastore_Task, 'duration_secs': 0.009589} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1657.679663] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a48010a-712e-430b-9f7b-afb9435e692c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.688603] env[62510]: DEBUG oslo_vmware.api [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1657.688603] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]522bbd18-5ecb-8e01-90d6-8c1c55334928" [ 1657.688603] env[62510]: _type = "Task" [ 1657.688603] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1657.700154] env[62510]: DEBUG oslo_vmware.api [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]522bbd18-5ecb-8e01-90d6-8c1c55334928, 'name': SearchDatastore_Task, 'duration_secs': 0.009694} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1657.702271] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1657.702614] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 0082eb97-26e9-4196-b8e3-63460d32dd19/0082eb97-26e9-4196-b8e3-63460d32dd19.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1657.703969] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9e3bdfd7-5ca1-4ea8-a852-96241fdb2c39 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.711475] env[62510]: DEBUG oslo_vmware.api [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1657.711475] env[62510]: value = "task-1768884" [ 1657.711475] env[62510]: _type = "Task" [ 1657.711475] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1657.719162] env[62510]: DEBUG nova.compute.manager [req-298d1d19-b57e-442a-8a9e-5a3c3672bb22 req-6c8413fe-c67a-49e1-880a-dd16c0091f51 service nova] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Received event network-vif-deleted-648008fa-8f2b-4cb2-a911-200874a59cc0 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1657.719406] env[62510]: INFO nova.compute.manager [req-298d1d19-b57e-442a-8a9e-5a3c3672bb22 req-6c8413fe-c67a-49e1-880a-dd16c0091f51 service nova] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Neutron deleted interface 648008fa-8f2b-4cb2-a911-200874a59cc0; detaching it from the instance and deleting it from the info cache [ 1657.719617] env[62510]: DEBUG nova.network.neutron [req-298d1d19-b57e-442a-8a9e-5a3c3672bb22 req-6c8413fe-c67a-49e1-880a-dd16c0091f51 service nova] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1657.727584] env[62510]: DEBUG oslo_vmware.api [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1768884, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.848025] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ec5dec1a-c502-43da-b7af-111abc37dd6c tempest-ServersListShow296Test-310805575 tempest-ServersListShow296Test-310805575-project-member] Lock "1f0ab639-bfcb-48eb-a079-ea07dd627c2f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 49.114s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1657.934603] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b49c3aa-755d-437d-bba7-83f38964d8bb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.943446] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a016ba79-8aae-418a-9416-0c751ecb52b6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.975346] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4898577d-bc00-4089-b7fb-5bc6f3842643 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.983955] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1faa547d-35e6-4374-981c-95ae1bd2d4a6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.001268] env[62510]: DEBUG nova.compute.provider_tree [None req-24bc14d2-8c6b-47e6-9f89-f6569b6cd4a9 tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1658.056203] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1658.056369] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Starting heal instance info cache {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 1658.163438] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Acquiring lock "refresh_cache-d1c20183-ba24-4a11-ad82-bf240d581322" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1658.163747] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Acquired lock "refresh_cache-d1c20183-ba24-4a11-ad82-bf240d581322" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1658.163747] env[62510]: DEBUG nova.network.neutron [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 
tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1658.195588] env[62510]: DEBUG nova.network.neutron [-] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1658.225561] env[62510]: DEBUG oslo_vmware.api [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1768884, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1658.225943] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dc51b8e2-9a90-40b7-8bd9-2e0a423a3d7b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.238009] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04082b98-55c6-4281-9643-cbac2a2970ce {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.285955] env[62510]: DEBUG nova.compute.manager [req-298d1d19-b57e-442a-8a9e-5a3c3672bb22 req-6c8413fe-c67a-49e1-880a-dd16c0091f51 service nova] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Detach interface failed, port_id=648008fa-8f2b-4cb2-a911-200874a59cc0, reason: Instance 0d27da5c-20f3-4df1-86d2-036c904fd657 could not be found. {{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1658.505319] env[62510]: DEBUG nova.scheduler.client.report [None req-24bc14d2-8c6b-47e6-9f89-f6569b6cd4a9 tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1658.532172] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1658.532615] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-35f9ccdb-4053-4d21-82c9-04c2a10cb112 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.542732] env[62510]: DEBUG oslo_vmware.api [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1658.542732] env[62510]: value = "task-1768885" [ 1658.542732] env[62510]: _type = "Task" [ 
1658.542732] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1658.551715] env[62510]: DEBUG oslo_vmware.api [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768885, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1658.696974] env[62510]: DEBUG nova.network.neutron [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1658.699120] env[62510]: INFO nova.compute.manager [-] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Took 1.37 seconds to deallocate network for instance. [ 1658.725234] env[62510]: DEBUG oslo_vmware.api [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1768884, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.526456} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1658.725558] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 0082eb97-26e9-4196-b8e3-63460d32dd19/0082eb97-26e9-4196-b8e3-63460d32dd19.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1658.725749] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1658.726062] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b9396ee2-9bd2-46aa-8d80-9e8994f2a34c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.734778] env[62510]: DEBUG oslo_vmware.api [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1658.734778] env[62510]: value = "task-1768886" [ 1658.734778] env[62510]: _type = "Task" [ 1658.734778] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1658.746743] env[62510]: DEBUG oslo_vmware.api [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1768886, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1658.969183] env[62510]: DEBUG nova.network.neutron [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Updating instance_info_cache with network_info: [{"id": "8d06415c-4a6c-4092-aed2-02f3e574052b", "address": "fa:16:3e:ab:4b:2a", "network": {"id": "f8a73c35-302b-46f2-9c55-63efc73eee95", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1580802535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ca23491c4194bee84d0e9be0b015342", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d06415c-4a", "ovs_interfaceid": "8d06415c-4a6c-4092-aed2-02f3e574052b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1659.013441] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24bc14d2-8c6b-47e6-9f89-f6569b6cd4a9 tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.291s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1659.015802] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bf020cec-3ef6-4b06-86db-fe8d02d197f2 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.192s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1659.016984] env[62510]: DEBUG nova.objects.instance [None req-bf020cec-3ef6-4b06-86db-fe8d02d197f2 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lazy-loading 'resources' on Instance uuid b5ff2a10-3c76-469a-86e0-ed3b135bca37 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1659.035041] env[62510]: INFO nova.scheduler.client.report [None req-24bc14d2-8c6b-47e6-9f89-f6569b6cd4a9 tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Deleted allocations for instance 641628d1-bb6d-4207-89b9-98014328e028 [ 1659.055167] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] VM already powered off {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1659.055473] env[62510]: 
DEBUG nova.virt.vmwareapi.vmops [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1659.056420] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eb52003-2c0e-4179-b2b2-4b9098de13d9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.066340] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1659.066584] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-34a23f88-f282-4d54-a200-d7557978ae08 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.144724] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1659.145073] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1659.145306] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Deleting the datastore file [datastore1] 90869287-22bd-438c-8684-56f5d43e3ca8 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1659.145970] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fa3ae2c8-46f1-449b-8bed-83a25e876089 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.153257] env[62510]: DEBUG oslo_vmware.api [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1659.153257] env[62510]: value = "task-1768888" [ 1659.153257] env[62510]: _type = "Task" [ 1659.153257] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1659.162077] env[62510]: DEBUG oslo_vmware.api [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768888, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.205405] env[62510]: DEBUG oslo_concurrency.lockutils [None req-74f9e27a-caad-46f9-b6d0-6aa789f4e49f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1659.245864] env[62510]: DEBUG oslo_vmware.api [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1768886, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06701} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1659.246203] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1659.247029] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db882d53-3ab0-484d-9e7b-f46329b2def1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.270564] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] 0082eb97-26e9-4196-b8e3-63460d32dd19/0082eb97-26e9-4196-b8e3-63460d32dd19.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1659.270953] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-26bc3406-cae3-40da-bcb0-3c2aca45309f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.292159] env[62510]: DEBUG oslo_vmware.api [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1659.292159] env[62510]: value = "task-1768889" [ 1659.292159] env[62510]: _type = "Task" [ 1659.292159] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1659.301357] env[62510]: DEBUG oslo_vmware.api [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1768889, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.472572] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Releasing lock "refresh_cache-d1c20183-ba24-4a11-ad82-bf240d581322" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1659.472813] env[62510]: DEBUG nova.compute.manager [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Instance network_info: |[{"id": "8d06415c-4a6c-4092-aed2-02f3e574052b", "address": "fa:16:3e:ab:4b:2a", "network": {"id": "f8a73c35-302b-46f2-9c55-63efc73eee95", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1580802535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ca23491c4194bee84d0e9be0b015342", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d06415c-4a", "ovs_interfaceid": "8d06415c-4a6c-4092-aed2-02f3e574052b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1659.473294] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ab:4b:2a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78e1ebb0-0130-446b-bf73-a0e59bbb95cc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8d06415c-4a6c-4092-aed2-02f3e574052b', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1659.481047] env[62510]: DEBUG oslo.service.loopingcall [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1659.481269] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1659.481495] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7f813c9d-d392-4ccc-bcea-abc3adad82c9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.502956] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1659.502956] env[62510]: value = "task-1768890" [ 1659.502956] env[62510]: _type = "Task" [ 1659.502956] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1659.512298] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768890, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.544031] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24bc14d2-8c6b-47e6-9f89-f6569b6cd4a9 tempest-InstanceActionsV221TestJSON-2064152346 tempest-InstanceActionsV221TestJSON-2064152346-project-member] Lock "641628d1-bb6d-4207-89b9-98014328e028" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 48.775s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1659.671082] env[62510]: DEBUG oslo_vmware.api [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768888, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.176656} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1659.671716] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1659.671915] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1659.672105] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1659.679464] env[62510]: DEBUG nova.compute.manager [req-1df91296-029e-4403-8cf2-e78501a10b2e req-aee724d8-f17e-48f7-9069-35c14cdad1ab service nova] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Received event network-changed-8d06415c-4a6c-4092-aed2-02f3e574052b {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1659.679656] env[62510]: DEBUG nova.compute.manager [req-1df91296-029e-4403-8cf2-e78501a10b2e req-aee724d8-f17e-48f7-9069-35c14cdad1ab service nova] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Refreshing instance network info cache due to event network-changed-8d06415c-4a6c-4092-aed2-02f3e574052b. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1659.679872] env[62510]: DEBUG oslo_concurrency.lockutils [req-1df91296-029e-4403-8cf2-e78501a10b2e req-aee724d8-f17e-48f7-9069-35c14cdad1ab service nova] Acquiring lock "refresh_cache-d1c20183-ba24-4a11-ad82-bf240d581322" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1659.680028] env[62510]: DEBUG oslo_concurrency.lockutils [req-1df91296-029e-4403-8cf2-e78501a10b2e req-aee724d8-f17e-48f7-9069-35c14cdad1ab service nova] Acquired lock "refresh_cache-d1c20183-ba24-4a11-ad82-bf240d581322" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1659.680200] env[62510]: DEBUG nova.network.neutron [req-1df91296-029e-4403-8cf2-e78501a10b2e req-aee724d8-f17e-48f7-9069-35c14cdad1ab service nova] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Refreshing network info cache for port 8d06415c-4a6c-4092-aed2-02f3e574052b {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1659.806471] env[62510]: DEBUG oslo_vmware.api [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1768889, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.014354] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-644bb423-8e40-47f8-a18c-13b27881be42 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.021990] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768890, 'name': CreateVM_Task, 'duration_secs': 0.40007} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1660.021990] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1660.023252] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1660.023252] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1660.023252] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1660.023601] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c827aa2-07ae-47e0-86fd-a59b744d1061 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.028990] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-297701d4-8820-44cb-8273-e2afd381ffc4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.035846] env[62510]: DEBUG oslo_vmware.api [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Waiting for the task: (returnval){ [ 1660.035846] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52f2108b-31d0-1927-6636-630b82a889da" [ 1660.035846] env[62510]: _type = "Task" [ 1660.035846] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1660.076409] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-131486a1-6dc6-483f-bc09-fc61ab7046bb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.083636] env[62510]: DEBUG oslo_vmware.api [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52f2108b-31d0-1927-6636-630b82a889da, 'name': SearchDatastore_Task, 'duration_secs': 0.012082} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1660.084344] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1660.084587] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1660.084818] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1660.084964] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1660.085156] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1660.085421] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-95d80d97-bea4-4deb-91f9-cf35d93efd91 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.094601] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e723d279-dc1b-4876-a4c4-ae254ac3530a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.111620] env[62510]: DEBUG nova.compute.provider_tree [None req-bf020cec-3ef6-4b06-86db-fe8d02d197f2 
tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1660.118016] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1660.118016] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1660.118016] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea830332-b40f-4947-8d5e-a551384e7966 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.127498] env[62510]: DEBUG oslo_vmware.api [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Waiting for the task: (returnval){ [ 1660.127498] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]527785b6-6c0c-e168-ead1-d44502879c6f" [ 1660.127498] env[62510]: _type = "Task" [ 1660.127498] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1660.135730] env[62510]: DEBUG oslo_vmware.api [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]527785b6-6c0c-e168-ead1-d44502879c6f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.303480] env[62510]: DEBUG oslo_vmware.api [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1768889, 'name': ReconfigVM_Task, 'duration_secs': 0.594652} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1660.303836] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Reconfigured VM instance instance-0000003f to attach disk [datastore1] 0082eb97-26e9-4196-b8e3-63460d32dd19/0082eb97-26e9-4196-b8e3-63460d32dd19.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1660.304482] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d829dc69-66d4-4d5d-8411-a05edb0cc689 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.312821] env[62510]: DEBUG oslo_vmware.api [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1660.312821] env[62510]: value = "task-1768891" [ 1660.312821] env[62510]: _type = "Task" [ 1660.312821] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1660.322745] env[62510]: DEBUG oslo_vmware.api [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1768891, 'name': Rename_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.388643] env[62510]: DEBUG oslo_concurrency.lockutils [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1660.617096] env[62510]: DEBUG nova.scheduler.client.report [None req-bf020cec-3ef6-4b06-86db-fe8d02d197f2 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1660.636828] env[62510]: DEBUG oslo_vmware.api [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]527785b6-6c0c-e168-ead1-d44502879c6f, 'name': SearchDatastore_Task, 'duration_secs': 0.011254} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1660.637667] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ba0e5d3-b2c7-4f68-b984-b61eb8c651a1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.645214] env[62510]: DEBUG oslo_vmware.api [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Waiting for the task: (returnval){ [ 1660.645214] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52e0fb45-c4c6-c005-d383-a16a761a5f5a" [ 1660.645214] env[62510]: _type = "Task" [ 1660.645214] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1660.654798] env[62510]: DEBUG oslo_vmware.api [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52e0fb45-c4c6-c005-d383-a16a761a5f5a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.711503] env[62510]: DEBUG nova.virt.hardware [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1660.711764] env[62510]: DEBUG nova.virt.hardware [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1660.711918] env[62510]: DEBUG nova.virt.hardware [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1660.712112] env[62510]: DEBUG nova.virt.hardware [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1660.712263] env[62510]: DEBUG nova.virt.hardware [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] 
Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1660.712412] env[62510]: DEBUG nova.virt.hardware [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1660.712624] env[62510]: DEBUG nova.virt.hardware [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1660.712782] env[62510]: DEBUG nova.virt.hardware [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1660.712948] env[62510]: DEBUG nova.virt.hardware [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1660.713140] env[62510]: DEBUG nova.virt.hardware [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1660.713321] env[62510]: DEBUG nova.virt.hardware [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1660.714729] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8f2a22b-1aef-4d82-8b0b-b3b2e7f15746 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.723538] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e3e7195-c8e0-405a-bf5e-4949513c3107 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.738605] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d3:6a:cc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9630cae2-7dd9-42b7-8b53-91ab254af243', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1e4fadb9-6725-488d-9382-0ca6b5eb0b23', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1660.746477] env[62510]: DEBUG oslo.service.loopingcall [None 
req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1660.746768] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1660.746999] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f5e97cf0-2081-4bcf-a7df-7f953692cc6e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.769644] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1660.769644] env[62510]: value = "task-1768892" [ 1660.769644] env[62510]: _type = "Task" [ 1660.769644] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1660.781849] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768892, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.825141] env[62510]: DEBUG oslo_vmware.api [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1768891, 'name': Rename_Task, 'duration_secs': 0.161474} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1660.825499] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1660.825768] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-56b29a05-bca1-45e2-b889-adc839778b64 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.834286] env[62510]: DEBUG oslo_vmware.api [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1660.834286] env[62510]: value = "task-1768893" [ 1660.834286] env[62510]: _type = "Task" [ 1660.834286] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1660.843767] env[62510]: DEBUG oslo_vmware.api [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1768893, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.931788] env[62510]: DEBUG nova.network.neutron [req-1df91296-029e-4403-8cf2-e78501a10b2e req-aee724d8-f17e-48f7-9069-35c14cdad1ab service nova] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Updated VIF entry in instance network info cache for port 8d06415c-4a6c-4092-aed2-02f3e574052b. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1660.932168] env[62510]: DEBUG nova.network.neutron [req-1df91296-029e-4403-8cf2-e78501a10b2e req-aee724d8-f17e-48f7-9069-35c14cdad1ab service nova] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Updating instance_info_cache with network_info: [{"id": "8d06415c-4a6c-4092-aed2-02f3e574052b", "address": "fa:16:3e:ab:4b:2a", "network": {"id": "f8a73c35-302b-46f2-9c55-63efc73eee95", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1580802535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ca23491c4194bee84d0e9be0b015342", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d06415c-4a", "ovs_interfaceid": "8d06415c-4a6c-4092-aed2-02f3e574052b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1661.123491] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bf020cec-3ef6-4b06-86db-fe8d02d197f2 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.107s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1661.125530] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e5607298-fd60-476c-a05f-800c3a28fabe tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.184s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1661.125761] env[62510]: DEBUG nova.objects.instance [None req-e5607298-fd60-476c-a05f-800c3a28fabe tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Lazy-loading 'resources' on Instance uuid 13cdba63-5db4-419f-9e0b-244832d7866b {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1661.160844] env[62510]: DEBUG oslo_vmware.api [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52e0fb45-c4c6-c005-d383-a16a761a5f5a, 'name': SearchDatastore_Task, 'duration_secs': 0.014189} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1661.161075] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1661.161557] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] d1c20183-ba24-4a11-ad82-bf240d581322/d1c20183-ba24-4a11-ad82-bf240d581322.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1661.162051] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a351cd9e-a207-458d-95d8-e42255db8818 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.169142] env[62510]: INFO nova.scheduler.client.report [None req-bf020cec-3ef6-4b06-86db-fe8d02d197f2 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Deleted allocations for instance b5ff2a10-3c76-469a-86e0-ed3b135bca37 [ 1661.174925] env[62510]: DEBUG oslo_vmware.api [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Waiting for the task: (returnval){ [ 1661.174925] env[62510]: value = "task-1768894" [ 1661.174925] env[62510]: _type = "Task" [ 1661.174925] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1661.187803] env[62510]: DEBUG oslo_vmware.api [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': task-1768894, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.282668] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768892, 'name': CreateVM_Task, 'duration_secs': 0.466379} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1661.282843] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1661.283639] env[62510]: DEBUG oslo_concurrency.lockutils [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1661.283849] env[62510]: DEBUG oslo_concurrency.lockutils [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1661.284227] env[62510]: DEBUG oslo_concurrency.lockutils [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1661.284527] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2fafa985-a4cf-4000-8709-a7ba34852922 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.293402] env[62510]: DEBUG oslo_vmware.api [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1661.293402] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52cfc0bb-607b-a625-d05d-b242d1bc15f5" [ 1661.293402] env[62510]: _type = "Task" [ 1661.293402] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1661.306739] env[62510]: DEBUG oslo_vmware.api [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52cfc0bb-607b-a625-d05d-b242d1bc15f5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.345776] env[62510]: DEBUG oslo_vmware.api [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1768893, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.435556] env[62510]: DEBUG oslo_concurrency.lockutils [req-1df91296-029e-4403-8cf2-e78501a10b2e req-aee724d8-f17e-48f7-9069-35c14cdad1ab service nova] Releasing lock "refresh_cache-d1c20183-ba24-4a11-ad82-bf240d581322" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1661.463579] env[62510]: INFO nova.compute.manager [None req-ba604331-050e-42ab-8c10-1dda4597fa16 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Get console output [ 1661.463738] env[62510]: WARNING nova.virt.vmwareapi.driver [None req-ba604331-050e-42ab-8c10-1dda4597fa16 tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] The console log is missing. Check your VSPC configuration [ 1661.645874] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "refresh_cache-8ffa27e9-6a3b-48d1-aed4-c808089788d9" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1661.645874] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquired lock "refresh_cache-8ffa27e9-6a3b-48d1-aed4-c808089788d9" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1661.645874] env[62510]: DEBUG nova.network.neutron [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Forcefully refreshing network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1661.679798] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bf020cec-3ef6-4b06-86db-fe8d02d197f2 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "b5ff2a10-3c76-469a-86e0-ed3b135bca37" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.127s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1661.695632] env[62510]: DEBUG oslo_vmware.api [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': task-1768894, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.806306] env[62510]: DEBUG oslo_vmware.api [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52cfc0bb-607b-a625-d05d-b242d1bc15f5, 'name': SearchDatastore_Task, 'duration_secs': 0.011776} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1661.808748] env[62510]: DEBUG oslo_concurrency.lockutils [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1661.808991] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1661.809236] env[62510]: DEBUG oslo_concurrency.lockutils [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1661.809383] env[62510]: DEBUG oslo_concurrency.lockutils [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1661.809556] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1661.809986] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-98cd1f5a-9bf9-4d63-a54f-44853c1f6af3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.827087] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1661.827310] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1661.828091] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a91ac296-146d-4cbd-a152-02b0b9197f55 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.842124] env[62510]: DEBUG oslo_vmware.api [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1661.842124] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52421444-55fc-b2aa-dcae-18e2c6471bb1" [ 1661.842124] env[62510]: _type = "Task" [ 1661.842124] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1661.846175] env[62510]: DEBUG oslo_vmware.api [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1768893, 'name': PowerOnVM_Task, 'duration_secs': 0.766719} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1661.849986] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1661.850233] env[62510]: INFO nova.compute.manager [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Took 12.04 seconds to spawn the instance on the hypervisor. [ 1661.850416] env[62510]: DEBUG nova.compute.manager [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1661.855025] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e2340c9-a0aa-454b-986b-7116b4d07fd0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.868466] env[62510]: DEBUG oslo_vmware.api [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52421444-55fc-b2aa-dcae-18e2c6471bb1, 'name': SearchDatastore_Task, 'duration_secs': 0.019131} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1661.872762] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-752470c8-e6f0-4bd2-ab7d-15c0b8720276 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.879249] env[62510]: DEBUG oslo_vmware.api [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1661.879249] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5282a833-cafb-618c-e306-3eb853c6c4c9" [ 1661.879249] env[62510]: _type = "Task" [ 1661.879249] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1661.891167] env[62510]: DEBUG oslo_vmware.api [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5282a833-cafb-618c-e306-3eb853c6c4c9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.148269] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd6363bd-981d-4243-8fe9-b27108b0326c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.158361] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-733ef4ab-76f1-4388-8c0d-a66ea8a4183f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.204479] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eeae454-4e75-463a-adcc-1d7e5d2bc5c6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.216026] env[62510]: DEBUG oslo_vmware.api [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': task-1768894, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.597808} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1662.218322] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] d1c20183-ba24-4a11-ad82-bf240d581322/d1c20183-ba24-4a11-ad82-bf240d581322.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1662.218441] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1662.218734] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e04e2f77-7c85-4425-a04c-3ec479ba060b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.221563] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48aa6b99-850e-49ac-b52d-fb2267e52e17 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.240257] env[62510]: DEBUG nova.compute.provider_tree [None req-e5607298-fd60-476c-a05f-800c3a28fabe tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1662.244046] env[62510]: DEBUG oslo_vmware.api [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Waiting for the task: (returnval){ [ 1662.244046] env[62510]: value = "task-1768895" [ 1662.244046] env[62510]: _type = "Task" [ 1662.244046] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1662.386899] env[62510]: INFO nova.compute.manager [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Took 58.57 seconds to build instance. [ 1662.399192] env[62510]: DEBUG oslo_vmware.api [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5282a833-cafb-618c-e306-3eb853c6c4c9, 'name': SearchDatastore_Task, 'duration_secs': 0.01257} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1662.399192] env[62510]: DEBUG oslo_concurrency.lockutils [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1662.399644] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 90869287-22bd-438c-8684-56f5d43e3ca8/90869287-22bd-438c-8684-56f5d43e3ca8.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1662.400205] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8fc425bc-8fb7-4a64-932b-4d8b5db8cd44 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.411779] env[62510]: DEBUG oslo_vmware.api [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1662.411779] env[62510]: value = "task-1768896" [ 1662.411779] env[62510]: _type = "Task" [ 1662.411779] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1662.425339] env[62510]: DEBUG oslo_vmware.api [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768896, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.712358] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a014305d-2438-47a2-83ea-eca5b8202f1c tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Acquiring lock "8bbafd7f-cdd1-4246-a509-2f97a6f78497" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1662.713382] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a014305d-2438-47a2-83ea-eca5b8202f1c tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Lock "8bbafd7f-cdd1-4246-a509-2f97a6f78497" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1662.713382] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a014305d-2438-47a2-83ea-eca5b8202f1c tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Acquiring lock "8bbafd7f-cdd1-4246-a509-2f97a6f78497-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1662.713382] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a014305d-2438-47a2-83ea-eca5b8202f1c tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Lock "8bbafd7f-cdd1-4246-a509-2f97a6f78497-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1662.713880] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a014305d-2438-47a2-83ea-eca5b8202f1c tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Lock "8bbafd7f-cdd1-4246-a509-2f97a6f78497-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1662.716019] env[62510]: INFO nova.compute.manager [None req-a014305d-2438-47a2-83ea-eca5b8202f1c tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Terminating instance [ 1662.745897] env[62510]: DEBUG nova.scheduler.client.report [None req-e5607298-fd60-476c-a05f-800c3a28fabe tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1662.759689] env[62510]: DEBUG 
oslo_vmware.api [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': task-1768895, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.139462} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1662.759930] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1662.760744] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3b7edb3-5466-4f5e-b3e3-5a073ebee2dd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.785244] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] d1c20183-ba24-4a11-ad82-bf240d581322/d1c20183-ba24-4a11-ad82-bf240d581322.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1662.786306] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3269f17f-e30f-4a9e-9bd7-3a40e10e8c56 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.811855] env[62510]: DEBUG oslo_vmware.api [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Waiting for the task: (returnval){ [ 1662.811855] env[62510]: value = "task-1768897" [ 1662.811855] env[62510]: _type = "Task" [ 1662.811855] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1662.823192] env[62510]: DEBUG oslo_vmware.api [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': task-1768897, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.889889] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ea688d9c-0f95-4026-863b-ab89e87e406c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "0082eb97-26e9-4196-b8e3-63460d32dd19" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.127s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1662.922406] env[62510]: DEBUG oslo_vmware.api [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768896, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.083781] env[62510]: DEBUG nova.network.neutron [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Updating instance_info_cache with network_info: [{"id": "f4736e99-c658-4d4e-ace8-a3b4552f43bf", "address": "fa:16:3e:11:18:84", "network": {"id": "22bd7136-e6e5-445f-8cd0-6cfe0341410c", "bridge": "br-int", "label": "tempest-ServersTestJSON-2034430291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "94a46473611d4b22be7c66c909d1b348", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4736e99-c6", "ovs_interfaceid": "f4736e99-c658-4d4e-ace8-a3b4552f43bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1663.220529] env[62510]: DEBUG nova.compute.manager [None req-a014305d-2438-47a2-83ea-eca5b8202f1c tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1663.220749] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a014305d-2438-47a2-83ea-eca5b8202f1c tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1663.221673] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb1ed445-cf87-4a77-80e8-69fd653df143 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.232504] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-a014305d-2438-47a2-83ea-eca5b8202f1c tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1663.232761] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b192a5d9-483f-4ed7-8c40-99b42b8786e8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.239929] env[62510]: DEBUG oslo_vmware.api [None req-a014305d-2438-47a2-83ea-eca5b8202f1c tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Waiting for the task: (returnval){ [ 1663.239929] env[62510]: value = "task-1768898" [ 1663.239929] env[62510]: _type = "Task" [ 1663.239929] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.247829] env[62510]: DEBUG oslo_vmware.api [None req-a014305d-2438-47a2-83ea-eca5b8202f1c tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Task: {'id': task-1768898, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.254124] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e5607298-fd60-476c-a05f-800c3a28fabe tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.128s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1663.255911] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 41.646s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1663.260018] env[62510]: INFO nova.compute.claims [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1663.272852] env[62510]: INFO nova.scheduler.client.report [None req-e5607298-fd60-476c-a05f-800c3a28fabe tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Deleted allocations for instance 13cdba63-5db4-419f-9e0b-244832d7866b [ 1663.324850] env[62510]: DEBUG oslo_vmware.api [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': task-1768897, 'name': ReconfigVM_Task, 'duration_secs': 0.379165} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.325362] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Reconfigured VM instance instance-00000040 to attach disk [datastore1] d1c20183-ba24-4a11-ad82-bf240d581322/d1c20183-ba24-4a11-ad82-bf240d581322.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1663.326047] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-184481d3-b34c-4926-a646-4267e6a50768 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.333229] env[62510]: DEBUG oslo_vmware.api [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Waiting for the task: (returnval){ [ 1663.333229] env[62510]: value = "task-1768899" [ 1663.333229] env[62510]: _type = "Task" [ 1663.333229] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.342292] env[62510]: DEBUG oslo_vmware.api [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': task-1768899, 'name': Rename_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.424348] env[62510]: DEBUG oslo_vmware.api [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768896, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.587541] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Releasing lock "refresh_cache-8ffa27e9-6a3b-48d1-aed4-c808089788d9" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1663.587541] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Updated the network info_cache for instance {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10380}} [ 1663.587541] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1663.587541] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1663.587541] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1663.587771] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1663.587860] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1663.588081] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1663.588283] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62510) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 1663.589031] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1663.725448] env[62510]: DEBUG oslo_concurrency.lockutils [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "b7ffe11f-2f63-419b-9ad8-0a89a05d201c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1663.725705] env[62510]: DEBUG oslo_concurrency.lockutils [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "b7ffe11f-2f63-419b-9ad8-0a89a05d201c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1663.750796] env[62510]: DEBUG oslo_vmware.api [None req-a014305d-2438-47a2-83ea-eca5b8202f1c tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Task: {'id': task-1768898, 'name': PowerOffVM_Task, 'duration_secs': 0.409619} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.751198] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-a014305d-2438-47a2-83ea-eca5b8202f1c tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1663.751405] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a014305d-2438-47a2-83ea-eca5b8202f1c tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1663.751654] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3bc2ed52-bcb6-4928-be28-5d5a2ccf6d53 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.783632] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e5607298-fd60-476c-a05f-800c3a28fabe tempest-FloatingIPsAssociationNegativeTestJSON-1137661344 tempest-FloatingIPsAssociationNegativeTestJSON-1137661344-project-member] Lock "13cdba63-5db4-419f-9e0b-244832d7866b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.774s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1663.843411] env[62510]: DEBUG oslo_vmware.api [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': task-1768899, 'name': Rename_Task, 'duration_secs': 0.169847} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.843713] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1663.843920] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4177d2a4-697d-4974-ad60-2deae8f3f71a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.857018] env[62510]: DEBUG oslo_vmware.api [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Waiting for the task: (returnval){ [ 1663.857018] env[62510]: value = "task-1768901" [ 1663.857018] env[62510]: _type = "Task" [ 1663.857018] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.868617] env[62510]: DEBUG oslo_vmware.api [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': task-1768901, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.869923] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a014305d-2438-47a2-83ea-eca5b8202f1c tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1663.870154] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a014305d-2438-47a2-83ea-eca5b8202f1c tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1663.870343] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-a014305d-2438-47a2-83ea-eca5b8202f1c tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Deleting the datastore file [datastore1] 8bbafd7f-cdd1-4246-a509-2f97a6f78497 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1663.870602] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fbdc47aa-bcdd-4220-ba1d-4cd1d1a3714b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.879635] env[62510]: DEBUG oslo_vmware.api [None req-a014305d-2438-47a2-83ea-eca5b8202f1c tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Waiting for the task: (returnval){ [ 1663.879635] env[62510]: value = "task-1768902" [ 1663.879635] env[62510]: _type = "Task" [ 1663.879635] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.890693] env[62510]: DEBUG oslo_vmware.api [None req-a014305d-2438-47a2-83ea-eca5b8202f1c tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Task: {'id': task-1768902, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.924120] env[62510]: DEBUG oslo_vmware.api [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768896, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.983179] env[62510]: DEBUG oslo_concurrency.lockutils [None req-66791635-44cf-46b7-8794-6454f0561e0d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "0082eb97-26e9-4196-b8e3-63460d32dd19" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1663.983523] env[62510]: DEBUG oslo_concurrency.lockutils [None req-66791635-44cf-46b7-8794-6454f0561e0d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "0082eb97-26e9-4196-b8e3-63460d32dd19" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1663.983794] env[62510]: DEBUG oslo_concurrency.lockutils [None req-66791635-44cf-46b7-8794-6454f0561e0d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "0082eb97-26e9-4196-b8e3-63460d32dd19-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1663.984021] env[62510]: DEBUG oslo_concurrency.lockutils [None req-66791635-44cf-46b7-8794-6454f0561e0d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "0082eb97-26e9-4196-b8e3-63460d32dd19-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1663.984233] env[62510]: DEBUG oslo_concurrency.lockutils [None req-66791635-44cf-46b7-8794-6454f0561e0d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "0082eb97-26e9-4196-b8e3-63460d32dd19-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1663.987437] env[62510]: INFO nova.compute.manager [None req-66791635-44cf-46b7-8794-6454f0561e0d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Terminating instance [ 1664.091905] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring 
lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1664.228991] env[62510]: DEBUG nova.compute.manager [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1664.374217] env[62510]: DEBUG oslo_vmware.api [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': task-1768901, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.392881] env[62510]: DEBUG oslo_vmware.api [None req-a014305d-2438-47a2-83ea-eca5b8202f1c tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Task: {'id': task-1768902, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174901} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1664.393213] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-a014305d-2438-47a2-83ea-eca5b8202f1c tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1664.393409] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a014305d-2438-47a2-83ea-eca5b8202f1c tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1664.393584] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a014305d-2438-47a2-83ea-eca5b8202f1c tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1664.393785] env[62510]: INFO nova.compute.manager [None req-a014305d-2438-47a2-83ea-eca5b8202f1c tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1664.394063] env[62510]: DEBUG oslo.service.loopingcall [None req-a014305d-2438-47a2-83ea-eca5b8202f1c tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1664.397012] env[62510]: DEBUG nova.compute.manager [-] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1664.397141] env[62510]: DEBUG nova.network.neutron [-] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1664.426256] env[62510]: DEBUG oslo_vmware.api [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768896, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.61263} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1664.426667] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 90869287-22bd-438c-8684-56f5d43e3ca8/90869287-22bd-438c-8684-56f5d43e3ca8.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1664.426837] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1664.427122] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-373b1db8-0ee7-4dde-9226-6a4d2269e70e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.438419] env[62510]: DEBUG oslo_vmware.api [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1664.438419] env[62510]: value = "task-1768903" [ 1664.438419] env[62510]: _type = "Task" [ 1664.438419] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1664.453634] env[62510]: DEBUG oslo_vmware.api [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768903, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.491636] env[62510]: DEBUG nova.compute.manager [None req-66791635-44cf-46b7-8794-6454f0561e0d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1664.491895] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-66791635-44cf-46b7-8794-6454f0561e0d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1664.492905] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aeba46c-b816-43fe-8a1f-34feb05b558a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.503878] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-66791635-44cf-46b7-8794-6454f0561e0d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1664.504182] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a6cd4efb-387d-4f0f-a9b1-4780c7f666c7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.511745] env[62510]: DEBUG oslo_vmware.api [None req-66791635-44cf-46b7-8794-6454f0561e0d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1664.511745] env[62510]: value = "task-1768904" [ 1664.511745] env[62510]: _type = "Task" [ 1664.511745] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1664.523354] env[62510]: DEBUG oslo_vmware.api [None req-66791635-44cf-46b7-8794-6454f0561e0d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1768904, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.756448] env[62510]: DEBUG oslo_concurrency.lockutils [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1664.839122] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33e7560f-0d3a-461a-b1a9-baa210c58ccf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.854162] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a35f657-3fca-407e-acf5-1c9d50099ece {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.894682] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc1305e2-4934-4773-ae40-e02157e3377f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.900851] env[62510]: DEBUG oslo_vmware.api [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': task-1768901, 'name': PowerOnVM_Task, 'duration_secs': 0.592116} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1664.901635] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1664.901893] env[62510]: INFO nova.compute.manager [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Took 7.66 seconds to spawn the instance on the hypervisor. 
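The entries above repeatedly exercise two library patterns: oslo_concurrency.lockutils serializing work on the "compute_resources" lock name, and oslo_vmware submitting a vCenter task (PowerOnVM_Task, ExtendVirtualDisk_Task, ...) and polling it until it completes. The following is a minimal sketch of how those two libraries are typically driven, for orientation only; it is not nova's actual code, and the vCenter host, credentials, and managed-object ID are placeholders.

# Illustrative sketch only, not nova's code: the lockutils and oslo.vmware
# patterns visible in the surrounding log entries. The host, credentials and
# VM managed-object ID below are placeholders.
from oslo_concurrency import lockutils
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util


@lockutils.synchronized('compute_resources')
def claim_resources():
    # Runs under the same in-process lock name that appears in the
    # "Acquiring lock compute_resources ..." entries above.
    pass


def power_on_vm(session, vm_moref_value):
    # Build a managed object reference for the VM (placeholder value).
    vm_ref = vim_util.get_moref(vm_moref_value, 'VirtualMachine')
    # Submit PowerOnVM_Task through the session; a task reference comes back.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task() polls the task (the "_poll_task ... progress is NN%"
    # entries above) and raises if the task finishes in an error state.
    return session.wait_for_task(task)


if __name__ == '__main__':
    # Placeholder connection details; a real deployment reads these from
    # configuration rather than hard-coding them.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'administrator@vsphere.local', 'secret',
        api_retry_count=10, task_poll_interval=0.5)
    claim_resources()
    power_on_vm(session, 'vm-12345')

wait_for_task() blocks only the calling greenthread while it polls, which is consistent with the way the log interleaves many request IDs while individual tasks are still reporting progress.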
[ 1664.902120] env[62510]: DEBUG nova.compute.manager [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1664.902979] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e218692-b62e-4194-9aa1-e44df1748569 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.909248] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1c2d4e2-47f3-4823-866f-e92d2800f50a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.930931] env[62510]: DEBUG nova.compute.provider_tree [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1664.947447] env[62510]: DEBUG oslo_vmware.api [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768903, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.106306} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1664.947727] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1664.949293] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63d7006d-f6f2-46fd-b544-7b2d4a1272f7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.975712] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] 90869287-22bd-438c-8684-56f5d43e3ca8/90869287-22bd-438c-8684-56f5d43e3ca8.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1664.977472] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0c3c3e53-aa8a-4aed-a910-16fe0e03722d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.997941] env[62510]: DEBUG oslo_vmware.api [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1664.997941] env[62510]: value = "task-1768905" [ 1664.997941] env[62510]: _type = "Task" [ 1664.997941] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1665.007055] env[62510]: DEBUG oslo_vmware.api [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768905, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.023514] env[62510]: DEBUG oslo_vmware.api [None req-66791635-44cf-46b7-8794-6454f0561e0d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1768904, 'name': PowerOffVM_Task, 'duration_secs': 0.331101} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1665.023811] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-66791635-44cf-46b7-8794-6454f0561e0d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1665.023993] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-66791635-44cf-46b7-8794-6454f0561e0d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1665.024255] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-47371696-443f-4151-8e76-90af824b7cb3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.124800] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-66791635-44cf-46b7-8794-6454f0561e0d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1665.125260] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-66791635-44cf-46b7-8794-6454f0561e0d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1665.125260] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-66791635-44cf-46b7-8794-6454f0561e0d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Deleting the datastore file [datastore1] 0082eb97-26e9-4196-b8e3-63460d32dd19 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1665.126241] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-44c1c0a3-ad75-41bd-9279-bf0d86ec0995 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.134214] env[62510]: DEBUG oslo_vmware.api [None req-66791635-44cf-46b7-8794-6454f0561e0d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1665.134214] env[62510]: value = "task-1768907" [ 1665.134214] env[62510]: _type = "Task" [ 
1665.134214] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1665.144490] env[62510]: DEBUG oslo_vmware.api [None req-66791635-44cf-46b7-8794-6454f0561e0d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1768907, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.430501] env[62510]: INFO nova.compute.manager [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Took 57.39 seconds to build instance. [ 1665.437667] env[62510]: DEBUG nova.scheduler.client.report [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1665.508663] env[62510]: DEBUG oslo_vmware.api [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768905, 'name': ReconfigVM_Task, 'duration_secs': 0.50789} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1665.509822] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Reconfigured VM instance instance-0000003e to attach disk [datastore1] 90869287-22bd-438c-8684-56f5d43e3ca8/90869287-22bd-438c-8684-56f5d43e3ca8.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1665.511246] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-efbc0126-a588-4125-a6fa-9fbe726010e1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.520134] env[62510]: DEBUG oslo_vmware.api [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1665.520134] env[62510]: value = "task-1768908" [ 1665.520134] env[62510]: _type = "Task" [ 1665.520134] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1665.531579] env[62510]: DEBUG oslo_vmware.api [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768908, 'name': Rename_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.645114] env[62510]: DEBUG oslo_vmware.api [None req-66791635-44cf-46b7-8794-6454f0561e0d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1768907, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.234927} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1665.645114] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-66791635-44cf-46b7-8794-6454f0561e0d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1665.645114] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-66791635-44cf-46b7-8794-6454f0561e0d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1665.645423] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-66791635-44cf-46b7-8794-6454f0561e0d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1665.645571] env[62510]: INFO nova.compute.manager [None req-66791635-44cf-46b7-8794-6454f0561e0d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1665.646323] env[62510]: DEBUG oslo.service.loopingcall [None req-66791635-44cf-46b7-8794-6454f0561e0d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1665.646323] env[62510]: DEBUG nova.compute.manager [-] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1665.646323] env[62510]: DEBUG nova.network.neutron [-] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1665.664056] env[62510]: DEBUG nova.compute.manager [req-5f0f97f3-ce19-40b9-b0b7-768b5ae2ae0a req-9d7c83d4-72d8-4374-9587-b720f7dfe6e3 service nova] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Received event network-vif-deleted-af2efe8c-492c-4033-b300-295761787dee {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1665.664304] env[62510]: INFO nova.compute.manager [req-5f0f97f3-ce19-40b9-b0b7-768b5ae2ae0a req-9d7c83d4-72d8-4374-9587-b720f7dfe6e3 service nova] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Neutron deleted interface af2efe8c-492c-4033-b300-295761787dee; detaching it from the instance and deleting it from the info cache [ 1665.664524] env[62510]: DEBUG nova.network.neutron [req-5f0f97f3-ce19-40b9-b0b7-768b5ae2ae0a req-9d7c83d4-72d8-4374-9587-b720f7dfe6e3 service nova] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1665.906483] env[62510]: DEBUG nova.network.neutron [-] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1665.938708] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ba9992ab-229a-4979-9119-5cd46c31626e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Lock "d1c20183-ba24-4a11-ad82-bf240d581322" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.569s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1665.945749] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.690s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1665.946347] env[62510]: DEBUG nova.compute.manager [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1665.950260] env[62510]: DEBUG oslo_concurrency.lockutils [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 42.912s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1665.951844] env[62510]: INFO nova.compute.claims [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1666.037506] env[62510]: DEBUG oslo_vmware.api [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768908, 'name': Rename_Task, 'duration_secs': 0.372669} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1666.037506] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1666.037506] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-05fbed79-4b0e-4064-b557-12b75d456598 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.043260] env[62510]: DEBUG oslo_vmware.api [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1666.043260] env[62510]: value = "task-1768909" [ 1666.043260] env[62510]: _type = "Task" [ 1666.043260] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1666.053066] env[62510]: DEBUG oslo_vmware.api [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768909, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.169797] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5dfb938a-94f1-4300-acd7-e904f53e6b83 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.180218] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e4cbe37-5f2e-427d-bd3e-cbb6bfce5daa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.216455] env[62510]: DEBUG nova.compute.manager [req-5f0f97f3-ce19-40b9-b0b7-768b5ae2ae0a req-9d7c83d4-72d8-4374-9587-b720f7dfe6e3 service nova] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Detach interface failed, port_id=af2efe8c-492c-4033-b300-295761787dee, reason: Instance 8bbafd7f-cdd1-4246-a509-2f97a6f78497 could not be found. {{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1666.411297] env[62510]: INFO nova.compute.manager [-] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Took 2.01 seconds to deallocate network for instance. [ 1666.460587] env[62510]: DEBUG nova.compute.utils [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1666.462874] env[62510]: DEBUG nova.compute.manager [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1666.463179] env[62510]: DEBUG nova.network.neutron [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1666.517766] env[62510]: DEBUG nova.policy [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '112bb5174a71476f9aaa66e917fc135a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cca414b18f8d431786c155d359f1325d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1666.546659] env[62510]: DEBUG nova.network.neutron [-] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1666.562837] env[62510]: DEBUG oslo_vmware.api [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768909, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.919164] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a014305d-2438-47a2-83ea-eca5b8202f1c tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1666.963216] env[62510]: DEBUG nova.compute.manager [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1667.038927] env[62510]: DEBUG oslo_concurrency.lockutils [None req-741da164-1ecc-42d9-9388-7ce0dc16e7eb tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Acquiring lock "d1c20183-ba24-4a11-ad82-bf240d581322" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1667.039222] env[62510]: DEBUG oslo_concurrency.lockutils [None req-741da164-1ecc-42d9-9388-7ce0dc16e7eb tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Lock "d1c20183-ba24-4a11-ad82-bf240d581322" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1667.039471] env[62510]: INFO nova.compute.manager [None req-741da164-1ecc-42d9-9388-7ce0dc16e7eb tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Rebooting instance [ 1667.048422] env[62510]: INFO nova.compute.manager [-] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Took 1.40 seconds to deallocate network for instance. [ 1667.064036] env[62510]: DEBUG oslo_vmware.api [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768909, 'name': PowerOnVM_Task, 'duration_secs': 0.994214} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1667.067120] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1667.067120] env[62510]: DEBUG nova.compute.manager [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1667.068777] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2d1671b-9478-4480-95d0-877338df2ea6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.078694] env[62510]: DEBUG nova.network.neutron [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Successfully created port: e0d0d69b-8e64-4722-b7d5-837e5c7482bc {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1667.497203] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46887397-1d1a-440c-a551-5fd8ce61d69f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.505559] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6294fcd3-35af-4df4-9f41-3e4eed73759f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.540670] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72e53d85-c5ae-472d-a5b5-341a788ec301 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.554231] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b768ac2f-a5ff-4efa-b792-babe3b47a423 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.561634] env[62510]: DEBUG oslo_concurrency.lockutils [None req-66791635-44cf-46b7-8794-6454f0561e0d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1667.565625] env[62510]: DEBUG oslo_concurrency.lockutils [None req-741da164-1ecc-42d9-9388-7ce0dc16e7eb tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Acquiring lock "refresh_cache-d1c20183-ba24-4a11-ad82-bf240d581322" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1667.565814] env[62510]: DEBUG oslo_concurrency.lockutils [None req-741da164-1ecc-42d9-9388-7ce0dc16e7eb tempest-SecurityGroupsTestJSON-2125751649 
tempest-SecurityGroupsTestJSON-2125751649-project-member] Acquired lock "refresh_cache-d1c20183-ba24-4a11-ad82-bf240d581322" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1667.566024] env[62510]: DEBUG nova.network.neutron [None req-741da164-1ecc-42d9-9388-7ce0dc16e7eb tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1667.578554] env[62510]: DEBUG nova.compute.provider_tree [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1667.584661] env[62510]: INFO nova.compute.manager [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] bringing vm to original state: 'stopped' [ 1667.696863] env[62510]: DEBUG nova.compute.manager [req-84177678-5da3-45d1-9791-805c184a27ea req-057473cf-81cd-47e6-aa4c-dca25ac139c7 service nova] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Received event network-vif-deleted-5c6ed492-d50b-40a9-933c-22b7bd04020b {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1667.697359] env[62510]: DEBUG nova.compute.manager [req-84177678-5da3-45d1-9791-805c184a27ea req-057473cf-81cd-47e6-aa4c-dca25ac139c7 service nova] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Received event network-changed-8d06415c-4a6c-4092-aed2-02f3e574052b {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1667.697606] env[62510]: DEBUG nova.compute.manager [req-84177678-5da3-45d1-9791-805c184a27ea req-057473cf-81cd-47e6-aa4c-dca25ac139c7 service nova] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Refreshing instance network info cache due to event network-changed-8d06415c-4a6c-4092-aed2-02f3e574052b. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1667.697764] env[62510]: DEBUG oslo_concurrency.lockutils [req-84177678-5da3-45d1-9791-805c184a27ea req-057473cf-81cd-47e6-aa4c-dca25ac139c7 service nova] Acquiring lock "refresh_cache-d1c20183-ba24-4a11-ad82-bf240d581322" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1667.897366] env[62510]: DEBUG nova.network.neutron [None req-741da164-1ecc-42d9-9388-7ce0dc16e7eb tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Updating instance_info_cache with network_info: [{"id": "8d06415c-4a6c-4092-aed2-02f3e574052b", "address": "fa:16:3e:ab:4b:2a", "network": {"id": "f8a73c35-302b-46f2-9c55-63efc73eee95", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1580802535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ca23491c4194bee84d0e9be0b015342", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d06415c-4a", "ovs_interfaceid": "8d06415c-4a6c-4092-aed2-02f3e574052b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1667.977050] env[62510]: DEBUG nova.compute.manager [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1668.002999] env[62510]: DEBUG nova.virt.hardware [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1668.003474] env[62510]: DEBUG nova.virt.hardware [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1668.003737] env[62510]: DEBUG nova.virt.hardware [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1668.004141] env[62510]: DEBUG nova.virt.hardware [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1668.004499] env[62510]: DEBUG nova.virt.hardware [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1668.004838] env[62510]: DEBUG nova.virt.hardware [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1668.005279] env[62510]: DEBUG nova.virt.hardware [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1668.005628] env[62510]: DEBUG nova.virt.hardware [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1668.006037] env[62510]: DEBUG 
nova.virt.hardware [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1668.006450] env[62510]: DEBUG nova.virt.hardware [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1668.007039] env[62510]: DEBUG nova.virt.hardware [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1668.008267] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c9d11e5-bc76-4d01-abc4-f3a38fb546c0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.022018] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e44f42d4-4d8e-4333-b1fe-683673769f7a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.086037] env[62510]: DEBUG nova.scheduler.client.report [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1668.401591] env[62510]: DEBUG oslo_concurrency.lockutils [None req-741da164-1ecc-42d9-9388-7ce0dc16e7eb tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Releasing lock "refresh_cache-d1c20183-ba24-4a11-ad82-bf240d581322" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1668.401591] env[62510]: DEBUG oslo_concurrency.lockutils [req-84177678-5da3-45d1-9791-805c184a27ea req-057473cf-81cd-47e6-aa4c-dca25ac139c7 service nova] Acquired lock "refresh_cache-d1c20183-ba24-4a11-ad82-bf240d581322" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1668.402429] env[62510]: DEBUG nova.network.neutron [req-84177678-5da3-45d1-9791-805c184a27ea req-057473cf-81cd-47e6-aa4c-dca25ac139c7 service nova] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Refreshing network info cache for port 8d06415c-4a6c-4092-aed2-02f3e574052b {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1668.591520] env[62510]: DEBUG oslo_concurrency.lockutils [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 
tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "90869287-22bd-438c-8684-56f5d43e3ca8" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1668.591802] env[62510]: DEBUG oslo_concurrency.lockutils [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "90869287-22bd-438c-8684-56f5d43e3ca8" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1668.592015] env[62510]: DEBUG nova.compute.manager [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1668.592816] env[62510]: DEBUG oslo_concurrency.lockutils [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.643s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1668.593322] env[62510]: DEBUG nova.compute.manager [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1668.600668] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ddc7ebe-48b2-4880-9b25-e337ff2656b4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.607814] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 44.671s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1668.617605] env[62510]: DEBUG nova.compute.manager [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62510) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1668.908171] env[62510]: DEBUG nova.compute.manager [None req-741da164-1ecc-42d9-9388-7ce0dc16e7eb tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1668.909286] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73828adc-0add-4fd2-b5a8-5797dbbed509 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.111888] env[62510]: DEBUG nova.compute.utils [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1669.113509] env[62510]: DEBUG nova.objects.instance [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Lazy-loading 'migration_context' on Instance uuid fae7e580-ab09-4fda-9cbe-0e066ddcb85c {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1669.116345] env[62510]: DEBUG nova.compute.manager [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1669.116345] env[62510]: DEBUG nova.network.neutron [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1669.128520] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1669.128520] env[62510]: DEBUG nova.network.neutron [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Successfully updated port: e0d0d69b-8e64-4722-b7d5-837e5c7482bc {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1669.129309] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-15804ed4-54b3-4c03-ae1c-d0cc9a29c112 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.134070] env[62510]: DEBUG nova.network.neutron [req-84177678-5da3-45d1-9791-805c184a27ea req-057473cf-81cd-47e6-aa4c-dca25ac139c7 service nova] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Updated VIF entry in instance network info cache for port 8d06415c-4a6c-4092-aed2-02f3e574052b. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1669.134536] env[62510]: DEBUG nova.network.neutron [req-84177678-5da3-45d1-9791-805c184a27ea req-057473cf-81cd-47e6-aa4c-dca25ac139c7 service nova] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Updating instance_info_cache with network_info: [{"id": "8d06415c-4a6c-4092-aed2-02f3e574052b", "address": "fa:16:3e:ab:4b:2a", "network": {"id": "f8a73c35-302b-46f2-9c55-63efc73eee95", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1580802535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ca23491c4194bee84d0e9be0b015342", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d06415c-4a", "ovs_interfaceid": "8d06415c-4a6c-4092-aed2-02f3e574052b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1669.144092] env[62510]: DEBUG oslo_vmware.api [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 
tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1669.144092] env[62510]: value = "task-1768910" [ 1669.144092] env[62510]: _type = "Task" [ 1669.144092] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1669.158710] env[62510]: DEBUG oslo_vmware.api [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768910, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.179563] env[62510]: DEBUG nova.policy [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2253cda331f44a74a1a3fcf2620b0aa7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5bf95e12e67144409db5fd58d1a62df1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1669.443401] env[62510]: DEBUG nova.network.neutron [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Successfully created port: 3ac61e77-4998-412c-a492-0ae2c6578684 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1669.618076] env[62510]: DEBUG nova.compute.manager [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Start building block device mappings for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1669.634148] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "refresh_cache-241d842d-3dd5-4ac2-a18a-12b9c9fbd340" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1669.634347] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquired lock "refresh_cache-241d842d-3dd5-4ac2-a18a-12b9c9fbd340" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1669.634522] env[62510]: DEBUG nova.network.neutron [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1669.639963] env[62510]: DEBUG oslo_concurrency.lockutils [req-84177678-5da3-45d1-9791-805c184a27ea req-057473cf-81cd-47e6-aa4c-dca25ac139c7 service nova] Releasing lock "refresh_cache-d1c20183-ba24-4a11-ad82-bf240d581322" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1669.660725] env[62510]: DEBUG oslo_vmware.api [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768910, 'name': PowerOffVM_Task, 'duration_secs': 0.211392} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1669.660940] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1669.661213] env[62510]: DEBUG nova.compute.manager [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1669.662512] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36700f24-1053-4c73-9517-824750add050 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.925618] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f5aff0d-6f71-4787-9736-36987a6e1730 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.938347] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-741da164-1ecc-42d9-9388-7ce0dc16e7eb tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Doing hard reboot of VM {{(pid=62510) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1669.941537] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-a30f685c-b7cc-4877-80ec-a74501d56db5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.947659] env[62510]: DEBUG oslo_vmware.api [None req-741da164-1ecc-42d9-9388-7ce0dc16e7eb tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Waiting for the task: (returnval){ [ 1669.947659] env[62510]: value = "task-1768911" [ 1669.947659] env[62510]: _type = "Task" [ 1669.947659] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1669.964510] env[62510]: DEBUG oslo_vmware.api [None req-741da164-1ecc-42d9-9388-7ce0dc16e7eb tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': task-1768911, 'name': ResetVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.048367] env[62510]: DEBUG nova.compute.manager [req-86dbf9ee-5c02-4ea1-8caa-86789ff4227e req-da132f58-c419-436f-a375-cb4a4a31b55e service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Received event network-vif-plugged-e0d0d69b-8e64-4722-b7d5-837e5c7482bc {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1670.048593] env[62510]: DEBUG oslo_concurrency.lockutils [req-86dbf9ee-5c02-4ea1-8caa-86789ff4227e req-da132f58-c419-436f-a375-cb4a4a31b55e service nova] Acquiring lock "241d842d-3dd5-4ac2-a18a-12b9c9fbd340-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1670.049157] env[62510]: DEBUG oslo_concurrency.lockutils [req-86dbf9ee-5c02-4ea1-8caa-86789ff4227e req-da132f58-c419-436f-a375-cb4a4a31b55e service nova] Lock "241d842d-3dd5-4ac2-a18a-12b9c9fbd340-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1670.049157] env[62510]: DEBUG oslo_concurrency.lockutils [req-86dbf9ee-5c02-4ea1-8caa-86789ff4227e req-da132f58-c419-436f-a375-cb4a4a31b55e service nova] Lock "241d842d-3dd5-4ac2-a18a-12b9c9fbd340-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1670.049157] env[62510]: DEBUG nova.compute.manager [req-86dbf9ee-5c02-4ea1-8caa-86789ff4227e req-da132f58-c419-436f-a375-cb4a4a31b55e service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] No waiting events found dispatching network-vif-plugged-e0d0d69b-8e64-4722-b7d5-837e5c7482bc {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1670.049328] env[62510]: WARNING nova.compute.manager [req-86dbf9ee-5c02-4ea1-8caa-86789ff4227e req-da132f58-c419-436f-a375-cb4a4a31b55e service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Received unexpected event network-vif-plugged-e0d0d69b-8e64-4722-b7d5-837e5c7482bc for instance with vm_state building and task_state spawning. [ 1670.049465] env[62510]: DEBUG nova.compute.manager [req-86dbf9ee-5c02-4ea1-8caa-86789ff4227e req-da132f58-c419-436f-a375-cb4a4a31b55e service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Received event network-changed-e0d0d69b-8e64-4722-b7d5-837e5c7482bc {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1670.049624] env[62510]: DEBUG nova.compute.manager [req-86dbf9ee-5c02-4ea1-8caa-86789ff4227e req-da132f58-c419-436f-a375-cb4a4a31b55e service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Refreshing instance network info cache due to event network-changed-e0d0d69b-8e64-4722-b7d5-837e5c7482bc. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1670.049791] env[62510]: DEBUG oslo_concurrency.lockutils [req-86dbf9ee-5c02-4ea1-8caa-86789ff4227e req-da132f58-c419-436f-a375-cb4a4a31b55e service nova] Acquiring lock "refresh_cache-241d842d-3dd5-4ac2-a18a-12b9c9fbd340" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1670.181223] env[62510]: DEBUG oslo_concurrency.lockutils [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "90869287-22bd-438c-8684-56f5d43e3ca8" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.589s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1670.185118] env[62510]: DEBUG nova.network.neutron [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1670.189119] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cb2a139-96bf-4780-b6b6-fc58122d88e8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.199182] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-087c1fc6-03b7-457c-97ec-c1529c2ab43c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.244305] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-037cfb86-dc0a-4711-b73a-2ee8378a589f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.253441] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68bb8197-3902-4f49-a008-5e9880ffeac1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.270124] env[62510]: DEBUG nova.compute.provider_tree [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1670.396700] env[62510]: DEBUG nova.network.neutron [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Updating instance_info_cache with network_info: [{"id": "e0d0d69b-8e64-4722-b7d5-837e5c7482bc", "address": "fa:16:3e:2b:3e:7e", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0d0d69b-8e", "ovs_interfaceid": "e0d0d69b-8e64-4722-b7d5-837e5c7482bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1670.462501] env[62510]: DEBUG oslo_vmware.api [None req-741da164-1ecc-42d9-9388-7ce0dc16e7eb tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': task-1768911, 'name': ResetVM_Task, 'duration_secs': 0.098411} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1670.462501] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-741da164-1ecc-42d9-9388-7ce0dc16e7eb tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Did hard reboot of VM {{(pid=62510) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1670.462501] env[62510]: DEBUG nova.compute.manager [None req-741da164-1ecc-42d9-9388-7ce0dc16e7eb tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1670.462902] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bdb99aa-7db5-456f-856a-44f8619fc2a8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.629557] env[62510]: DEBUG nova.compute.manager [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1670.667557] env[62510]: DEBUG nova.virt.hardware [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1670.667557] env[62510]: DEBUG nova.virt.hardware [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1670.667557] env[62510]: DEBUG nova.virt.hardware [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1670.667557] env[62510]: DEBUG nova.virt.hardware [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1670.667804] env[62510]: DEBUG nova.virt.hardware [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1670.667804] env[62510]: DEBUG nova.virt.hardware [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1670.668845] env[62510]: DEBUG nova.virt.hardware [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1670.672099] env[62510]: DEBUG nova.virt.hardware [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 
tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1670.672312] env[62510]: DEBUG nova.virt.hardware [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1670.672516] env[62510]: DEBUG nova.virt.hardware [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1670.672699] env[62510]: DEBUG nova.virt.hardware [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1670.673641] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a909151b-7945-4e34-b5eb-8431a07162a2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.690583] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ca8af0e-38a4-473e-b303-fb79641854f0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.698833] env[62510]: DEBUG oslo_concurrency.lockutils [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1670.775088] env[62510]: DEBUG nova.scheduler.client.report [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1670.899588] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Releasing lock "refresh_cache-241d842d-3dd5-4ac2-a18a-12b9c9fbd340" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1670.901836] env[62510]: DEBUG nova.compute.manager [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 
tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Instance network_info: |[{"id": "e0d0d69b-8e64-4722-b7d5-837e5c7482bc", "address": "fa:16:3e:2b:3e:7e", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0d0d69b-8e", "ovs_interfaceid": "e0d0d69b-8e64-4722-b7d5-837e5c7482bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1670.902208] env[62510]: DEBUG oslo_concurrency.lockutils [req-86dbf9ee-5c02-4ea1-8caa-86789ff4227e req-da132f58-c419-436f-a375-cb4a4a31b55e service nova] Acquired lock "refresh_cache-241d842d-3dd5-4ac2-a18a-12b9c9fbd340" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1670.902396] env[62510]: DEBUG nova.network.neutron [req-86dbf9ee-5c02-4ea1-8caa-86789ff4227e req-da132f58-c419-436f-a375-cb4a4a31b55e service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Refreshing network info cache for port e0d0d69b-8e64-4722-b7d5-837e5c7482bc {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1670.903635] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2b:3e:7e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2e0cfc48-d93b-4477-8082-69a2f7aa7701', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e0d0d69b-8e64-4722-b7d5-837e5c7482bc', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1670.916979] env[62510]: DEBUG oslo.service.loopingcall [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1670.917421] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1670.918290] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5089dcce-4f3e-4a7d-8911-fd03d00abd54 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.942739] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1670.942739] env[62510]: value = "task-1768912" [ 1670.942739] env[62510]: _type = "Task" [ 1670.942739] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.957254] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768912, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.977849] env[62510]: DEBUG oslo_concurrency.lockutils [None req-741da164-1ecc-42d9-9388-7ce0dc16e7eb tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Lock "d1c20183-ba24-4a11-ad82-bf240d581322" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 3.938s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1671.205239] env[62510]: DEBUG nova.network.neutron [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Successfully updated port: 3ac61e77-4998-412c-a492-0ae2c6578684 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1671.455491] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768912, 'name': CreateVM_Task, 'duration_secs': 0.435377} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.457805] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1671.458512] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1671.458690] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1671.459039] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1671.459625] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b09f25f-acf7-4cfb-9314-727d9683d0e9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.468993] env[62510]: DEBUG oslo_vmware.api [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1671.468993] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]529457af-d9fa-33a3-f9ab-ffe8da60bac9" [ 1671.468993] env[62510]: _type = "Task" [ 1671.468993] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.478435] env[62510]: DEBUG oslo_vmware.api [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]529457af-d9fa-33a3-f9ab-ffe8da60bac9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.620519] env[62510]: DEBUG nova.compute.manager [req-929c21c4-d373-414d-aafa-082eb75e5380 req-cd422431-5693-43d8-a7a5-801d124aa9b4 service nova] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Received event network-vif-plugged-3ac61e77-4998-412c-a492-0ae2c6578684 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1671.621482] env[62510]: DEBUG oslo_concurrency.lockutils [req-929c21c4-d373-414d-aafa-082eb75e5380 req-cd422431-5693-43d8-a7a5-801d124aa9b4 service nova] Acquiring lock "31fe5643-dece-484f-92d6-7c7cafbd51e4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1671.621829] env[62510]: DEBUG oslo_concurrency.lockutils [req-929c21c4-d373-414d-aafa-082eb75e5380 req-cd422431-5693-43d8-a7a5-801d124aa9b4 service nova] Lock "31fe5643-dece-484f-92d6-7c7cafbd51e4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1671.622123] env[62510]: DEBUG oslo_concurrency.lockutils [req-929c21c4-d373-414d-aafa-082eb75e5380 req-cd422431-5693-43d8-a7a5-801d124aa9b4 service nova] Lock "31fe5643-dece-484f-92d6-7c7cafbd51e4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1671.622388] env[62510]: DEBUG nova.compute.manager [req-929c21c4-d373-414d-aafa-082eb75e5380 req-cd422431-5693-43d8-a7a5-801d124aa9b4 service nova] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] No waiting events found dispatching network-vif-plugged-3ac61e77-4998-412c-a492-0ae2c6578684 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1671.622651] env[62510]: WARNING nova.compute.manager [req-929c21c4-d373-414d-aafa-082eb75e5380 req-cd422431-5693-43d8-a7a5-801d124aa9b4 service nova] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Received unexpected event network-vif-plugged-3ac61e77-4998-412c-a492-0ae2c6578684 for instance with vm_state building and task_state spawning. [ 1671.650698] env[62510]: DEBUG nova.network.neutron [req-86dbf9ee-5c02-4ea1-8caa-86789ff4227e req-da132f58-c419-436f-a375-cb4a4a31b55e service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Updated VIF entry in instance network info cache for port e0d0d69b-8e64-4722-b7d5-837e5c7482bc. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1671.650970] env[62510]: DEBUG nova.network.neutron [req-86dbf9ee-5c02-4ea1-8caa-86789ff4227e req-da132f58-c419-436f-a375-cb4a4a31b55e service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Updating instance_info_cache with network_info: [{"id": "e0d0d69b-8e64-4722-b7d5-837e5c7482bc", "address": "fa:16:3e:2b:3e:7e", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0d0d69b-8e", "ovs_interfaceid": "e0d0d69b-8e64-4722-b7d5-837e5c7482bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1671.708698] env[62510]: DEBUG oslo_concurrency.lockutils [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Acquiring lock "refresh_cache-31fe5643-dece-484f-92d6-7c7cafbd51e4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1671.708871] env[62510]: DEBUG oslo_concurrency.lockutils [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Acquired lock "refresh_cache-31fe5643-dece-484f-92d6-7c7cafbd51e4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1671.713028] env[62510]: DEBUG nova.network.neutron [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1671.788134] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 3.181s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1671.794815] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" 
:: waited 41.821s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1671.796667] env[62510]: INFO nova.compute.claims [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1671.989921] env[62510]: DEBUG oslo_vmware.api [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]529457af-d9fa-33a3-f9ab-ffe8da60bac9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.056416] env[62510]: DEBUG oslo_concurrency.lockutils [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Acquiring lock "2dce738b-9624-4a74-8b8c-042e45b693b0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1672.056661] env[62510]: DEBUG oslo_concurrency.lockutils [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Lock "2dce738b-9624-4a74-8b8c-042e45b693b0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1672.153987] env[62510]: DEBUG oslo_concurrency.lockutils [req-86dbf9ee-5c02-4ea1-8caa-86789ff4227e req-da132f58-c419-436f-a375-cb4a4a31b55e service nova] Releasing lock "refresh_cache-241d842d-3dd5-4ac2-a18a-12b9c9fbd340" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1672.251153] env[62510]: DEBUG nova.network.neutron [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1672.343643] env[62510]: DEBUG nova.compute.manager [req-9a3a81f5-4ad1-4a7b-832e-504219a203d9 req-bbc34987-d189-46d5-9311-8c68910c1d28 service nova] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Received event network-changed-8d06415c-4a6c-4092-aed2-02f3e574052b {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1672.343840] env[62510]: DEBUG nova.compute.manager [req-9a3a81f5-4ad1-4a7b-832e-504219a203d9 req-bbc34987-d189-46d5-9311-8c68910c1d28 service nova] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Refreshing instance network info cache due to event network-changed-8d06415c-4a6c-4092-aed2-02f3e574052b. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1672.344601] env[62510]: DEBUG oslo_concurrency.lockutils [req-9a3a81f5-4ad1-4a7b-832e-504219a203d9 req-bbc34987-d189-46d5-9311-8c68910c1d28 service nova] Acquiring lock "refresh_cache-d1c20183-ba24-4a11-ad82-bf240d581322" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1672.344601] env[62510]: DEBUG oslo_concurrency.lockutils [req-9a3a81f5-4ad1-4a7b-832e-504219a203d9 req-bbc34987-d189-46d5-9311-8c68910c1d28 service nova] Acquired lock "refresh_cache-d1c20183-ba24-4a11-ad82-bf240d581322" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1672.344601] env[62510]: DEBUG nova.network.neutron [req-9a3a81f5-4ad1-4a7b-832e-504219a203d9 req-bbc34987-d189-46d5-9311-8c68910c1d28 service nova] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Refreshing network info cache for port 8d06415c-4a6c-4092-aed2-02f3e574052b {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1672.409545] env[62510]: DEBUG nova.network.neutron [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Updating instance_info_cache with network_info: [{"id": "3ac61e77-4998-412c-a492-0ae2c6578684", "address": "fa:16:3e:d6:f3:6b", "network": {"id": "66e4fa42-ae7b-4140-8ba8-761151c90a2f", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-901146547-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5bf95e12e67144409db5fd58d1a62df1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ac61e77-49", "ovs_interfaceid": "3ac61e77-4998-412c-a492-0ae2c6578684", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1672.482298] env[62510]: DEBUG oslo_vmware.api [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]529457af-d9fa-33a3-f9ab-ffe8da60bac9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.560225] env[62510]: DEBUG nova.compute.manager [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1672.631120] env[62510]: DEBUG oslo_concurrency.lockutils [None req-512731e2-08e7-44f1-8b0a-853091c2970e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Acquiring lock "d1c20183-ba24-4a11-ad82-bf240d581322" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1672.631403] env[62510]: DEBUG oslo_concurrency.lockutils [None req-512731e2-08e7-44f1-8b0a-853091c2970e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Lock "d1c20183-ba24-4a11-ad82-bf240d581322" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1672.631636] env[62510]: DEBUG oslo_concurrency.lockutils [None req-512731e2-08e7-44f1-8b0a-853091c2970e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Acquiring lock "d1c20183-ba24-4a11-ad82-bf240d581322-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1672.631821] env[62510]: DEBUG oslo_concurrency.lockutils [None req-512731e2-08e7-44f1-8b0a-853091c2970e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Lock "d1c20183-ba24-4a11-ad82-bf240d581322-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1672.631991] env[62510]: DEBUG oslo_concurrency.lockutils [None req-512731e2-08e7-44f1-8b0a-853091c2970e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Lock "d1c20183-ba24-4a11-ad82-bf240d581322-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1672.634373] env[62510]: INFO nova.compute.manager [None req-512731e2-08e7-44f1-8b0a-853091c2970e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Terminating instance [ 1672.812134] env[62510]: DEBUG oslo_concurrency.lockutils [None req-948095a4-a52a-404e-bfaf-f29508e7b9a2 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "90869287-22bd-438c-8684-56f5d43e3ca8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1672.812401] env[62510]: DEBUG oslo_concurrency.lockutils [None req-948095a4-a52a-404e-bfaf-f29508e7b9a2 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "90869287-22bd-438c-8684-56f5d43e3ca8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1672.812589] env[62510]: DEBUG oslo_concurrency.lockutils [None req-948095a4-a52a-404e-bfaf-f29508e7b9a2 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "90869287-22bd-438c-8684-56f5d43e3ca8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1672.812769] env[62510]: DEBUG oslo_concurrency.lockutils [None req-948095a4-a52a-404e-bfaf-f29508e7b9a2 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "90869287-22bd-438c-8684-56f5d43e3ca8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1672.812942] env[62510]: DEBUG oslo_concurrency.lockutils [None req-948095a4-a52a-404e-bfaf-f29508e7b9a2 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "90869287-22bd-438c-8684-56f5d43e3ca8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1672.814888] env[62510]: INFO nova.compute.manager [None req-948095a4-a52a-404e-bfaf-f29508e7b9a2 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Terminating instance [ 1672.912916] env[62510]: DEBUG oslo_concurrency.lockutils [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Releasing lock "refresh_cache-31fe5643-dece-484f-92d6-7c7cafbd51e4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1672.914042] env[62510]: DEBUG nova.compute.manager [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Instance network_info: |[{"id": "3ac61e77-4998-412c-a492-0ae2c6578684", "address": "fa:16:3e:d6:f3:6b", "network": {"id": "66e4fa42-ae7b-4140-8ba8-761151c90a2f", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-901146547-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5bf95e12e67144409db5fd58d1a62df1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ac61e77-49", "ovs_interfaceid": "3ac61e77-4998-412c-a492-0ae2c6578684", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1672.914042] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d6:f3:6b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3ac61e77-4998-412c-a492-0ae2c6578684', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1672.921215] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Creating folder: Project (5bf95e12e67144409db5fd58d1a62df1). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1672.925627] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4f645f24-052d-456b-a308-e42ee0643730 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.944161] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Created folder: Project (5bf95e12e67144409db5fd58d1a62df1) in parent group-v367197. [ 1672.944161] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Creating folder: Instances. Parent ref: group-v367369. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1672.944161] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-00ffdca2-6485-456b-9c36-c03c6ed0742c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.953894] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Created folder: Instances in parent group-v367369. [ 1672.954180] env[62510]: DEBUG oslo.service.loopingcall [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1672.956866] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1672.957290] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-072650b2-003b-41c7-8640-419e7f7abb37 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.986631] env[62510]: DEBUG oslo_vmware.api [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]529457af-d9fa-33a3-f9ab-ffe8da60bac9, 'name': SearchDatastore_Task, 'duration_secs': 1.282976} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1672.988793] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1672.989060] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1672.989301] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1672.989451] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1672.989631] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1672.989894] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1672.989894] env[62510]: value = "task-1768915" [ 1672.989894] env[62510]: _type = "Task" [ 1672.989894] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.990175] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-13503097-23e3-45a6-af9f-15e9c398c3d5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.007963] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768915, 'name': CreateVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.012022] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1673.012022] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1673.012022] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e2e8f30-2692-4e32-b24a-130db4569afb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.020019] env[62510]: DEBUG oslo_vmware.api [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1673.020019] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52b317ad-41f1-8751-0cc0-10ac9430af40" [ 1673.020019] env[62510]: _type = "Task" [ 1673.020019] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.029061] env[62510]: DEBUG oslo_vmware.api [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52b317ad-41f1-8751-0cc0-10ac9430af40, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.090159] env[62510]: DEBUG oslo_concurrency.lockutils [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1673.112788] env[62510]: DEBUG nova.network.neutron [req-9a3a81f5-4ad1-4a7b-832e-504219a203d9 req-bbc34987-d189-46d5-9311-8c68910c1d28 service nova] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Updated VIF entry in instance network info cache for port 8d06415c-4a6c-4092-aed2-02f3e574052b. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1673.113178] env[62510]: DEBUG nova.network.neutron [req-9a3a81f5-4ad1-4a7b-832e-504219a203d9 req-bbc34987-d189-46d5-9311-8c68910c1d28 service nova] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Updating instance_info_cache with network_info: [{"id": "8d06415c-4a6c-4092-aed2-02f3e574052b", "address": "fa:16:3e:ab:4b:2a", "network": {"id": "f8a73c35-302b-46f2-9c55-63efc73eee95", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1580802535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ca23491c4194bee84d0e9be0b015342", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78e1ebb0-0130-446b-bf73-a0e59bbb95cc", "external-id": "nsx-vlan-transportzone-414", "segmentation_id": 414, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d06415c-4a", "ovs_interfaceid": "8d06415c-4a6c-4092-aed2-02f3e574052b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1673.138965] env[62510]: DEBUG nova.compute.manager [None req-512731e2-08e7-44f1-8b0a-853091c2970e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1673.139221] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-512731e2-08e7-44f1-8b0a-853091c2970e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1673.142600] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09b3f37d-304a-46a7-b643-6e83994e1f82 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.152378] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-512731e2-08e7-44f1-8b0a-853091c2970e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1673.152645] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b743b612-e3d2-408f-a00b-d5b112ee7ad2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.161251] env[62510]: DEBUG oslo_vmware.api [None req-512731e2-08e7-44f1-8b0a-853091c2970e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Waiting for the task: (returnval){ [ 1673.161251] env[62510]: value = "task-1768916" [ 1673.161251] env[62510]: _type = "Task" [ 1673.161251] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.173486] env[62510]: DEBUG oslo_vmware.api [None req-512731e2-08e7-44f1-8b0a-853091c2970e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': task-1768916, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.317936] env[62510]: DEBUG nova.compute.manager [None req-948095a4-a52a-404e-bfaf-f29508e7b9a2 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1673.318383] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-948095a4-a52a-404e-bfaf-f29508e7b9a2 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1673.319667] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cca9f40f-1fc4-4804-a4fc-ee8e93404286 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.328122] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-948095a4-a52a-404e-bfaf-f29508e7b9a2 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1673.338439] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6c2d6875-e064-497a-bf1f-dc25a1fdf01b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.353105] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc4671f8-30b7-443a-b07b-acce191507f7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.356746] env[62510]: INFO nova.compute.manager [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Swapping old allocation on dict_keys(['c3653102-341b-4ed1-8b1f-1abaf8aa3e56']) held by migration 8d1549df-6eeb-4b96-9648-01fd9071d48d for instance [ 1673.364526] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3be21269-90bc-4d5e-bd32-a35a2eec9931 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.408222] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9d62674-f831-4ab1-b017-6c2e98917991 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.417689] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afa5704f-c835-41ba-8b9c-f6d445b88aaf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.423955] env[62510]: DEBUG nova.scheduler.client.report [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Overwriting current allocation {'allocations': {'c3653102-341b-4ed1-8b1f-1abaf8aa3e56': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 94}}, 'project_id': 'c74083aa7b4a4db5b9b6d6248beb3ff3', 'user_id': 'ec8f49592421487c89b77efc86542f3e', 'consumer_generation': 1} on consumer fae7e580-ab09-4fda-9cbe-0e066ddcb85c {{(pid=62510) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2033}} [ 1673.437650] env[62510]: DEBUG nova.compute.provider_tree [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a 
tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1673.466641] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-948095a4-a52a-404e-bfaf-f29508e7b9a2 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1673.466912] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-948095a4-a52a-404e-bfaf-f29508e7b9a2 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1673.467126] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-948095a4-a52a-404e-bfaf-f29508e7b9a2 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Deleting the datastore file [datastore1] 90869287-22bd-438c-8684-56f5d43e3ca8 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1673.467415] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7ef1c933-ee2d-49df-9e08-a4d86af1a9b2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.477228] env[62510]: DEBUG oslo_vmware.api [None req-948095a4-a52a-404e-bfaf-f29508e7b9a2 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1673.477228] env[62510]: value = "task-1768918" [ 1673.477228] env[62510]: _type = "Task" [ 1673.477228] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.488144] env[62510]: DEBUG oslo_vmware.api [None req-948095a4-a52a-404e-bfaf-f29508e7b9a2 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768918, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.502163] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768915, 'name': CreateVM_Task, 'duration_secs': 0.450727} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.502306] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1673.502971] env[62510]: DEBUG oslo_concurrency.lockutils [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1673.503142] env[62510]: DEBUG oslo_concurrency.lockutils [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1673.503614] env[62510]: DEBUG oslo_concurrency.lockutils [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1673.503866] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20183f08-56f8-4f94-8c8e-d5196d2238ad {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.509114] env[62510]: DEBUG oslo_vmware.api [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Waiting for the task: (returnval){ [ 1673.509114] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]522da4b5-e86d-590a-3e6d-84f3c0ce1dbd" [ 1673.509114] env[62510]: _type = "Task" [ 1673.509114] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.517451] env[62510]: DEBUG oslo_vmware.api [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]522da4b5-e86d-590a-3e6d-84f3c0ce1dbd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.525902] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquiring lock "refresh_cache-fae7e580-ab09-4fda-9cbe-0e066ddcb85c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1673.526210] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquired lock "refresh_cache-fae7e580-ab09-4fda-9cbe-0e066ddcb85c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1673.526284] env[62510]: DEBUG nova.network.neutron [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1673.530746] env[62510]: DEBUG oslo_vmware.api [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52b317ad-41f1-8751-0cc0-10ac9430af40, 'name': SearchDatastore_Task, 'duration_secs': 0.01106} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.531758] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6de42cf6-f65b-4f37-bff5-62d9772ce530 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.538049] env[62510]: DEBUG oslo_vmware.api [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1673.538049] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52fe7332-fe4b-b71d-23bf-f91a5d316cad" [ 1673.538049] env[62510]: _type = "Task" [ 1673.538049] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.546887] env[62510]: DEBUG oslo_vmware.api [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52fe7332-fe4b-b71d-23bf-f91a5d316cad, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.616368] env[62510]: DEBUG oslo_concurrency.lockutils [req-9a3a81f5-4ad1-4a7b-832e-504219a203d9 req-bbc34987-d189-46d5-9311-8c68910c1d28 service nova] Releasing lock "refresh_cache-d1c20183-ba24-4a11-ad82-bf240d581322" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1673.645792] env[62510]: DEBUG nova.compute.manager [req-1d3967f2-658c-451d-ab57-e8f8ee162f60 req-32c15157-8e73-440d-ab37-3b1e7a91451f service nova] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Received event network-changed-3ac61e77-4998-412c-a492-0ae2c6578684 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1673.646124] env[62510]: DEBUG nova.compute.manager [req-1d3967f2-658c-451d-ab57-e8f8ee162f60 req-32c15157-8e73-440d-ab37-3b1e7a91451f service nova] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Refreshing instance network info cache due to event network-changed-3ac61e77-4998-412c-a492-0ae2c6578684. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1673.646305] env[62510]: DEBUG oslo_concurrency.lockutils [req-1d3967f2-658c-451d-ab57-e8f8ee162f60 req-32c15157-8e73-440d-ab37-3b1e7a91451f service nova] Acquiring lock "refresh_cache-31fe5643-dece-484f-92d6-7c7cafbd51e4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1673.646470] env[62510]: DEBUG oslo_concurrency.lockutils [req-1d3967f2-658c-451d-ab57-e8f8ee162f60 req-32c15157-8e73-440d-ab37-3b1e7a91451f service nova] Acquired lock "refresh_cache-31fe5643-dece-484f-92d6-7c7cafbd51e4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1673.646640] env[62510]: DEBUG nova.network.neutron [req-1d3967f2-658c-451d-ab57-e8f8ee162f60 req-32c15157-8e73-440d-ab37-3b1e7a91451f service nova] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Refreshing network info cache for port 3ac61e77-4998-412c-a492-0ae2c6578684 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1673.671237] env[62510]: DEBUG oslo_vmware.api [None req-512731e2-08e7-44f1-8b0a-853091c2970e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': task-1768916, 'name': PowerOffVM_Task, 'duration_secs': 0.201815} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.671500] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-512731e2-08e7-44f1-8b0a-853091c2970e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1673.671671] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-512731e2-08e7-44f1-8b0a-853091c2970e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1673.671922] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5472dd5b-f698-4674-bb81-95677dbe40f4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.756210] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-512731e2-08e7-44f1-8b0a-853091c2970e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1673.756461] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-512731e2-08e7-44f1-8b0a-853091c2970e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1673.756643] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-512731e2-08e7-44f1-8b0a-853091c2970e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Deleting the datastore file [datastore1] d1c20183-ba24-4a11-ad82-bf240d581322 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1673.756908] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0cff4840-729a-4ec7-b936-cb2f23537407 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.764067] env[62510]: DEBUG oslo_vmware.api [None req-512731e2-08e7-44f1-8b0a-853091c2970e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Waiting for the task: (returnval){ [ 1673.764067] env[62510]: value = "task-1768920" [ 1673.764067] env[62510]: _type = "Task" [ 1673.764067] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.772693] env[62510]: DEBUG oslo_vmware.api [None req-512731e2-08e7-44f1-8b0a-853091c2970e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': task-1768920, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.942123] env[62510]: DEBUG nova.scheduler.client.report [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1673.987772] env[62510]: DEBUG oslo_vmware.api [None req-948095a4-a52a-404e-bfaf-f29508e7b9a2 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1768918, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.175664} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.988069] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-948095a4-a52a-404e-bfaf-f29508e7b9a2 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1673.988273] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-948095a4-a52a-404e-bfaf-f29508e7b9a2 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1673.988453] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-948095a4-a52a-404e-bfaf-f29508e7b9a2 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1673.988627] env[62510]: INFO nova.compute.manager [None req-948095a4-a52a-404e-bfaf-f29508e7b9a2 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Took 0.67 seconds to destroy the instance on the hypervisor. [ 1673.988871] env[62510]: DEBUG oslo.service.loopingcall [None req-948095a4-a52a-404e-bfaf-f29508e7b9a2 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1673.989079] env[62510]: DEBUG nova.compute.manager [-] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1673.989181] env[62510]: DEBUG nova.network.neutron [-] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1674.022296] env[62510]: DEBUG oslo_vmware.api [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]522da4b5-e86d-590a-3e6d-84f3c0ce1dbd, 'name': SearchDatastore_Task, 'duration_secs': 0.011388} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.022587] env[62510]: DEBUG oslo_concurrency.lockutils [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1674.022818] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1674.023161] env[62510]: DEBUG oslo_concurrency.lockutils [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1674.049261] env[62510]: DEBUG oslo_vmware.api [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52fe7332-fe4b-b71d-23bf-f91a5d316cad, 'name': SearchDatastore_Task, 'duration_secs': 0.010056} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.049750] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1674.050018] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 241d842d-3dd5-4ac2-a18a-12b9c9fbd340/241d842d-3dd5-4ac2-a18a-12b9c9fbd340.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1674.050614] env[62510]: DEBUG oslo_concurrency.lockutils [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1674.050810] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1674.051052] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1eec7947-9842-4715-bd14-ea2db711c485 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.053125] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4fd308e5-b162-46ba-98cb-7f712f8c8cc7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.063048] env[62510]: DEBUG oslo_vmware.api [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1674.063048] env[62510]: value = "task-1768921" [ 1674.063048] env[62510]: _type = "Task" [ 1674.063048] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1674.064294] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1674.064467] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1674.068246] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c24d134-fc1c-4a25-8297-e8284d2c8a40 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.076724] env[62510]: DEBUG oslo_vmware.api [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768921, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.079987] env[62510]: DEBUG oslo_vmware.api [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Waiting for the task: (returnval){ [ 1674.079987] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]522c6e61-925c-60b7-194a-8b4b7f2a3d35" [ 1674.079987] env[62510]: _type = "Task" [ 1674.079987] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1674.093047] env[62510]: DEBUG oslo_vmware.api [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]522c6e61-925c-60b7-194a-8b4b7f2a3d35, 'name': SearchDatastore_Task, 'duration_secs': 0.016523} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.093823] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41ff6b38-5dc8-4691-8bf6-aafa3564b9cc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.099443] env[62510]: DEBUG oslo_vmware.api [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Waiting for the task: (returnval){ [ 1674.099443] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52c0e204-f87a-64c2-3d7d-47d4e3f28d6e" [ 1674.099443] env[62510]: _type = "Task" [ 1674.099443] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1674.108530] env[62510]: DEBUG oslo_vmware.api [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52c0e204-f87a-64c2-3d7d-47d4e3f28d6e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.275515] env[62510]: DEBUG oslo_vmware.api [None req-512731e2-08e7-44f1-8b0a-853091c2970e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': task-1768920, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167807} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.275822] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-512731e2-08e7-44f1-8b0a-853091c2970e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1674.276027] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-512731e2-08e7-44f1-8b0a-853091c2970e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1674.276239] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-512731e2-08e7-44f1-8b0a-853091c2970e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1674.276713] env[62510]: INFO nova.compute.manager [None req-512731e2-08e7-44f1-8b0a-853091c2970e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1674.277037] env[62510]: DEBUG oslo.service.loopingcall [None req-512731e2-08e7-44f1-8b0a-853091c2970e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1674.277265] env[62510]: DEBUG nova.compute.manager [-] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1674.277355] env[62510]: DEBUG nova.network.neutron [-] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1674.375686] env[62510]: DEBUG nova.network.neutron [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Updating instance_info_cache with network_info: [{"id": "21d515b9-b00f-45cc-9437-318ee6bba755", "address": "fa:16:3e:66:30:96", "network": {"id": "9c166e44-2e3c-47a6-9d04-6867c5f55bf4", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.170", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "06084a351fb546e09252574b82e81812", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21d515b9-b0", "ovs_interfaceid": "21d515b9-b00f-45cc-9437-318ee6bba755", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1674.448564] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.654s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1674.449285] env[62510]: DEBUG nova.compute.manager [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1674.453652] env[62510]: DEBUG oslo_concurrency.lockutils [None req-13a7af88-00cb-4252-bdea-5da40086a461 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.555s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1674.454143] env[62510]: DEBUG nova.objects.instance [None req-13a7af88-00cb-4252-bdea-5da40086a461 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Lazy-loading 'resources' on Instance uuid b004fba7-13e0-40f0-827d-8d09b7717176 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1674.587165] env[62510]: DEBUG oslo_vmware.api [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768921, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.613877] env[62510]: DEBUG oslo_vmware.api [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52c0e204-f87a-64c2-3d7d-47d4e3f28d6e, 'name': SearchDatastore_Task, 'duration_secs': 0.010277} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.616655] env[62510]: DEBUG oslo_concurrency.lockutils [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1674.617044] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 31fe5643-dece-484f-92d6-7c7cafbd51e4/31fe5643-dece-484f-92d6-7c7cafbd51e4.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1674.617393] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-54894e48-f953-40d4-878f-d23dc036fc27 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.625393] env[62510]: DEBUG oslo_vmware.api [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Waiting for the task: (returnval){ [ 1674.625393] env[62510]: value = "task-1768922" [ 1674.625393] env[62510]: _type = "Task" [ 1674.625393] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1674.633706] env[62510]: DEBUG oslo_vmware.api [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Task: {'id': task-1768922, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.681629] env[62510]: DEBUG nova.network.neutron [req-1d3967f2-658c-451d-ab57-e8f8ee162f60 req-32c15157-8e73-440d-ab37-3b1e7a91451f service nova] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Updated VIF entry in instance network info cache for port 3ac61e77-4998-412c-a492-0ae2c6578684. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1674.681995] env[62510]: DEBUG nova.network.neutron [req-1d3967f2-658c-451d-ab57-e8f8ee162f60 req-32c15157-8e73-440d-ab37-3b1e7a91451f service nova] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Updating instance_info_cache with network_info: [{"id": "3ac61e77-4998-412c-a492-0ae2c6578684", "address": "fa:16:3e:d6:f3:6b", "network": {"id": "66e4fa42-ae7b-4140-8ba8-761151c90a2f", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-901146547-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5bf95e12e67144409db5fd58d1a62df1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ac61e77-49", "ovs_interfaceid": "3ac61e77-4998-412c-a492-0ae2c6578684", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1674.879886] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Releasing lock "refresh_cache-fae7e580-ab09-4fda-9cbe-0e066ddcb85c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1674.881700] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1674.882942] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-922ee16b-a0a3-49dd-8b28-5739303ca044 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.891974] env[62510]: DEBUG oslo_vmware.api [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 
tempest-MigrationsAdminTest-857206338-project-member] Waiting for the task: (returnval){ [ 1674.891974] env[62510]: value = "task-1768923" [ 1674.891974] env[62510]: _type = "Task" [ 1674.891974] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1674.904648] env[62510]: DEBUG nova.network.neutron [-] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1674.906478] env[62510]: DEBUG oslo_vmware.api [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768923, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.957965] env[62510]: DEBUG nova.compute.utils [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1674.963030] env[62510]: DEBUG nova.compute.manager [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1674.963030] env[62510]: DEBUG nova.network.neutron [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1675.032202] env[62510]: DEBUG nova.policy [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '80f05c3e00b84277b4401aa98a253692', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bae4f0adee8c4c28add1849316448538', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1675.080592] env[62510]: DEBUG oslo_vmware.api [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768921, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.569051} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.081105] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 241d842d-3dd5-4ac2-a18a-12b9c9fbd340/241d842d-3dd5-4ac2-a18a-12b9c9fbd340.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1675.081201] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1675.081474] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4f8758d1-8390-4622-b0af-cf80e4012d22 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.095226] env[62510]: DEBUG oslo_vmware.api [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1675.095226] env[62510]: value = "task-1768924" [ 1675.095226] env[62510]: _type = "Task" [ 1675.095226] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.111152] env[62510]: DEBUG oslo_vmware.api [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768924, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.139175] env[62510]: DEBUG oslo_vmware.api [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Task: {'id': task-1768922, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.185131] env[62510]: DEBUG oslo_concurrency.lockutils [req-1d3967f2-658c-451d-ab57-e8f8ee162f60 req-32c15157-8e73-440d-ab37-3b1e7a91451f service nova] Releasing lock "refresh_cache-31fe5643-dece-484f-92d6-7c7cafbd51e4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1675.193808] env[62510]: DEBUG nova.compute.manager [req-57b0464f-ca12-4c78-b9ba-6aa8203528f2 req-99a9cedd-b99c-41f7-86e8-0cf57bb2e8d3 service nova] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Received event network-vif-deleted-8d06415c-4a6c-4092-aed2-02f3e574052b {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1675.194177] env[62510]: INFO nova.compute.manager [req-57b0464f-ca12-4c78-b9ba-6aa8203528f2 req-99a9cedd-b99c-41f7-86e8-0cf57bb2e8d3 service nova] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Neutron deleted interface 8d06415c-4a6c-4092-aed2-02f3e574052b; detaching it from the instance and deleting it from the info cache [ 1675.194256] env[62510]: DEBUG nova.network.neutron [req-57b0464f-ca12-4c78-b9ba-6aa8203528f2 req-99a9cedd-b99c-41f7-86e8-0cf57bb2e8d3 service nova] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1675.406992] env[62510]: DEBUG oslo_vmware.api [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768923, 'name': PowerOffVM_Task, 'duration_secs': 0.384853} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.407269] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1675.408074] env[62510]: DEBUG nova.virt.hardware [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:36:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='c7683c68-4a26-4844-9915-d8d489d9d625',id=26,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1203151111',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1675.408294] env[62510]: DEBUG nova.virt.hardware [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1675.408449] env[62510]: DEBUG nova.virt.hardware [None 
req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1675.408634] env[62510]: DEBUG nova.virt.hardware [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1675.408799] env[62510]: DEBUG nova.virt.hardware [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1675.409020] env[62510]: DEBUG nova.virt.hardware [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1675.409227] env[62510]: DEBUG nova.virt.hardware [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1675.410038] env[62510]: DEBUG nova.virt.hardware [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1675.410038] env[62510]: DEBUG nova.virt.hardware [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1675.410038] env[62510]: DEBUG nova.virt.hardware [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1675.410038] env[62510]: DEBUG nova.virt.hardware [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1675.418392] env[62510]: INFO nova.compute.manager [-] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Took 1.43 seconds to deallocate network for instance. 
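[annotation] The PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task, CopyVirtualDisk_Task and ReconfigVM_Task entries above all follow the same oslo.vmware shape: invoke an asynchronous vSphere task, then poll it, which is what produces the repeated "Waiting for the task ... to complete", "progress is N%" and "completed successfully" lines. The following is a minimal sketch of that pattern, not Nova's actual source; `session` is assumed to be an already-constructed oslo_vmware.api.VMwareAPISession and `vm_ref` a VirtualMachine managed-object reference, neither taken from this log.

    # Sketch of the invoke-then-poll pattern behind the
    # "Invoking VirtualMachine.PowerOffVM_Task ... Waiting for the task ...
    # progress is N% ... completed successfully" lines above.
    # `session` (oslo_vmware.api.VMwareAPISession) and `vm_ref` are assumed
    # inputs; they are placeholders, not values from this log.
    def power_off_and_wait(session, vm_ref):
        # invoke_api() issues the asynchronous SOAP call against the vSphere
        # API and returns a Task managed-object reference (the "Invoking ..."
        # line with its opID).
        task_ref = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # wait_for_task() polls the task at the session's task_poll_interval
        # (each poll is a "progress is N%" line) and returns once the task
        # reaches the "success" state; it raises if vCenter reports an error.
        return session.wait_for_task(task_ref)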
[ 1675.418633] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-37e4cf60-60bb-42ec-ab92-2e1f0179ca03 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.433948] env[62510]: DEBUG nova.network.neutron [-] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1675.444279] env[62510]: DEBUG oslo_vmware.api [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Waiting for the task: (returnval){ [ 1675.444279] env[62510]: value = "task-1768925" [ 1675.444279] env[62510]: _type = "Task" [ 1675.444279] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.454648] env[62510]: DEBUG oslo_vmware.api [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768925, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.466208] env[62510]: DEBUG nova.compute.manager [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1675.496295] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec30b992-39ac-4724-9acc-e7a812396a39 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.508095] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e56a35de-2a96-4a0d-9c64-e487d7d0d2a6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.546297] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f881406-6e40-4785-afb6-07b3f458da04 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.555212] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0a1d0ea-4f77-41eb-a7a4-51fefeb13f21 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.570639] env[62510]: DEBUG nova.compute.provider_tree [None req-13a7af88-00cb-4252-bdea-5da40086a461 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1675.575468] env[62510]: DEBUG nova.network.neutron [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Successfully created port: 5992dff8-0336-4d13-bbe8-2614b9dc96d5 {{(pid=62510) 
_create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1675.605247] env[62510]: DEBUG oslo_vmware.api [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768924, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088172} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.605533] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1675.606394] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-387a81b5-3126-4a35-b4b2-c9d2b50cc0aa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.629800] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Reconfiguring VM instance instance-00000042 to attach disk [datastore1] 241d842d-3dd5-4ac2-a18a-12b9c9fbd340/241d842d-3dd5-4ac2-a18a-12b9c9fbd340.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1675.630194] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eeaf91ea-af06-434f-b753-3e7aadb3d104 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.653982] env[62510]: DEBUG oslo_vmware.api [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Task: {'id': task-1768922, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.528742} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.655312] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 31fe5643-dece-484f-92d6-7c7cafbd51e4/31fe5643-dece-484f-92d6-7c7cafbd51e4.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1675.655542] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1675.655871] env[62510]: DEBUG oslo_vmware.api [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1675.655871] env[62510]: value = "task-1768926" [ 1675.655871] env[62510]: _type = "Task" [ 1675.655871] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.656098] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2baa52a1-09d5-4639-93cc-9751c0827ea6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.670146] env[62510]: DEBUG oslo_vmware.api [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768926, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.670817] env[62510]: DEBUG oslo_vmware.api [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Waiting for the task: (returnval){ [ 1675.670817] env[62510]: value = "task-1768927" [ 1675.670817] env[62510]: _type = "Task" [ 1675.670817] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.678032] env[62510]: DEBUG nova.compute.manager [req-aa0e552b-b515-4330-b4c8-38512fcc8a41 req-a29ac741-8ea9-4be0-96ef-96c6d59157d0 service nova] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Received event network-vif-deleted-1e4fadb9-6725-488d-9382-0ca6b5eb0b23 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1675.680167] env[62510]: DEBUG oslo_vmware.api [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Task: {'id': task-1768927, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.696867] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-db76934b-3355-44ea-895e-8d87153b666d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.708099] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86a14a76-2ff2-4af6-978b-cf08c242782d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.747670] env[62510]: DEBUG nova.compute.manager [req-57b0464f-ca12-4c78-b9ba-6aa8203528f2 req-99a9cedd-b99c-41f7-86e8-0cf57bb2e8d3 service nova] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Detach interface failed, port_id=8d06415c-4a6c-4092-aed2-02f3e574052b, reason: Instance d1c20183-ba24-4a11-ad82-bf240d581322 could not be found. {{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1675.944332] env[62510]: DEBUG oslo_concurrency.lockutils [None req-948095a4-a52a-404e-bfaf-f29508e7b9a2 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1675.944905] env[62510]: INFO nova.compute.manager [-] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Took 1.67 seconds to deallocate network for instance. [ 1675.964972] env[62510]: DEBUG oslo_vmware.api [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768925, 'name': ReconfigVM_Task, 'duration_secs': 0.160967} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.966295] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a89886c7-0dd4-4deb-a5f9-efe9002deb77 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.995250] env[62510]: DEBUG nova.virt.hardware [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:36:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='c7683c68-4a26-4844-9915-d8d489d9d625',id=26,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1203151111',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1675.995250] env[62510]: DEBUG nova.virt.hardware [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1675.995426] env[62510]: DEBUG nova.virt.hardware [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1675.995560] env[62510]: DEBUG nova.virt.hardware [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1675.995712] env[62510]: DEBUG nova.virt.hardware [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1675.995860] env[62510]: DEBUG nova.virt.hardware [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1675.996151] env[62510]: DEBUG nova.virt.hardware [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1675.996358] env[62510]: DEBUG nova.virt.hardware [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1675.996536] env[62510]: DEBUG nova.virt.hardware [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1675.996703] env[62510]: DEBUG nova.virt.hardware [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1675.997458] env[62510]: DEBUG nova.virt.hardware [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1675.998673] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9468033f-2057-4b9b-ad2b-d0bf88dc7fab {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.005189] env[62510]: DEBUG oslo_vmware.api [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Waiting for the task: (returnval){ [ 1676.005189] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5218693d-e905-98ba-b371-51407fd7ff00" [ 1676.005189] env[62510]: _type = "Task" [ 1676.005189] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.014503] env[62510]: DEBUG oslo_vmware.api [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5218693d-e905-98ba-b371-51407fd7ff00, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.073450] env[62510]: DEBUG nova.scheduler.client.report [None req-13a7af88-00cb-4252-bdea-5da40086a461 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1676.168472] env[62510]: DEBUG oslo_vmware.api [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768926, 'name': ReconfigVM_Task, 'duration_secs': 0.31596} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.168781] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Reconfigured VM instance instance-00000042 to attach disk [datastore1] 241d842d-3dd5-4ac2-a18a-12b9c9fbd340/241d842d-3dd5-4ac2-a18a-12b9c9fbd340.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1676.169426] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-edb926ef-d323-4107-8d3d-f330a84577b5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.181182] env[62510]: DEBUG oslo_vmware.api [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Task: {'id': task-1768927, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081657} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.181182] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1676.181182] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccf3c0e1-67e1-4b5e-bb1e-06c24fe5a13c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.184802] env[62510]: DEBUG oslo_vmware.api [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1676.184802] env[62510]: value = "task-1768928" [ 1676.184802] env[62510]: _type = "Task" [ 1676.184802] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.205406] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] 31fe5643-dece-484f-92d6-7c7cafbd51e4/31fe5643-dece-484f-92d6-7c7cafbd51e4.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1676.206139] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-95503b2c-6bd8-4513-90b8-55bd9930e55f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.223735] env[62510]: DEBUG oslo_vmware.api [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768928, 'name': Rename_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.229625] env[62510]: DEBUG oslo_vmware.api [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Waiting for the task: (returnval){ [ 1676.229625] env[62510]: value = "task-1768929" [ 1676.229625] env[62510]: _type = "Task" [ 1676.229625] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.240179] env[62510]: DEBUG oslo_vmware.api [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Task: {'id': task-1768929, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.458210] env[62510]: DEBUG oslo_concurrency.lockutils [None req-512731e2-08e7-44f1-8b0a-853091c2970e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1676.500506] env[62510]: DEBUG nova.compute.manager [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1676.518125] env[62510]: DEBUG oslo_vmware.api [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5218693d-e905-98ba-b371-51407fd7ff00, 'name': SearchDatastore_Task, 'duration_secs': 0.063972} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.524040] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Reconfiguring VM instance instance-0000002e to detach disk 2000 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1676.526580] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d637ce7c-93dd-4ab2-8f99-911ffde07fd8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.546610] env[62510]: DEBUG nova.virt.hardware [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1676.546858] env[62510]: DEBUG nova.virt.hardware [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1676.547024] env[62510]: DEBUG nova.virt.hardware [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1676.547235] env[62510]: DEBUG nova.virt.hardware [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1676.547399] env[62510]: DEBUG nova.virt.hardware [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1676.547547] env[62510]: DEBUG nova.virt.hardware [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1676.547756] env[62510]: DEBUG nova.virt.hardware [None 
req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1676.547940] env[62510]: DEBUG nova.virt.hardware [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1676.548125] env[62510]: DEBUG nova.virt.hardware [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1676.548294] env[62510]: DEBUG nova.virt.hardware [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1676.548498] env[62510]: DEBUG nova.virt.hardware [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1676.549356] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0cba1d8-737a-494f-b62d-2ff536ef6dbc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.553512] env[62510]: DEBUG oslo_vmware.api [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Waiting for the task: (returnval){ [ 1676.553512] env[62510]: value = "task-1768930" [ 1676.553512] env[62510]: _type = "Task" [ 1676.553512] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.561632] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dca2043a-675d-4c62-a981-bc851957ae18 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.568660] env[62510]: DEBUG oslo_vmware.api [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768930, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.579415] env[62510]: DEBUG oslo_concurrency.lockutils [None req-13a7af88-00cb-4252-bdea-5da40086a461 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.126s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1676.582020] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 38.297s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1676.582020] env[62510]: DEBUG nova.objects.instance [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62510) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1676.603255] env[62510]: INFO nova.scheduler.client.report [None req-13a7af88-00cb-4252-bdea-5da40086a461 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Deleted allocations for instance b004fba7-13e0-40f0-827d-8d09b7717176 [ 1676.695963] env[62510]: DEBUG oslo_vmware.api [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768928, 'name': Rename_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.739695] env[62510]: DEBUG oslo_vmware.api [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Task: {'id': task-1768929, 'name': ReconfigVM_Task, 'duration_secs': 0.305701} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.739987] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Reconfigured VM instance instance-00000043 to attach disk [datastore1] 31fe5643-dece-484f-92d6-7c7cafbd51e4/31fe5643-dece-484f-92d6-7c7cafbd51e4.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1676.740702] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-40bc8488-541e-49c5-98bd-3441298a2849 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.747854] env[62510]: DEBUG oslo_vmware.api [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Waiting for the task: (returnval){ [ 1676.747854] env[62510]: value = "task-1768931" [ 1676.747854] env[62510]: _type = "Task" [ 1676.747854] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.762198] env[62510]: DEBUG oslo_vmware.api [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Task: {'id': task-1768931, 'name': Rename_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.067993] env[62510]: DEBUG oslo_vmware.api [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768930, 'name': ReconfigVM_Task, 'duration_secs': 0.217752} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1677.067993] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Reconfigured VM instance instance-0000002e to detach disk 2000 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1677.067993] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cce80df3-dfc3-4585-8016-ed4ad5f42d1c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.090643] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Reconfiguring VM instance instance-0000002e to attach disk [datastore1] fae7e580-ab09-4fda-9cbe-0e066ddcb85c/fae7e580-ab09-4fda-9cbe-0e066ddcb85c.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1677.094013] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-66809239-4883-494a-9436-9c013a140c07 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.111158] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3d39adba-4dd8-4e00-9fc0-5828ee8fac19 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 0.529s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1677.114736] env[62510]: DEBUG oslo_concurrency.lockutils [None req-99984399-3df5-42b4-9b86-1a4b7879324f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.324s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1677.114736] env[62510]: DEBUG nova.objects.instance [None req-99984399-3df5-42b4-9b86-1a4b7879324f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Lazy-loading 'resources' on Instance uuid 3df19233-2448-4030-ae1d-a4f98ccffba9 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1677.116662] env[62510]: DEBUG oslo_concurrency.lockutils [None req-13a7af88-00cb-4252-bdea-5da40086a461 tempest-FloatingIPsAssociationTestJSON-1796888155 tempest-FloatingIPsAssociationTestJSON-1796888155-project-member] Lock "b004fba7-13e0-40f0-827d-8d09b7717176" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 48.059s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1677.124295] env[62510]: DEBUG oslo_vmware.api [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Waiting for the task: (returnval){ [ 1677.124295] env[62510]: value = "task-1768932" [ 1677.124295] 
env[62510]: _type = "Task" [ 1677.124295] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.136622] env[62510]: DEBUG oslo_vmware.api [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768932, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.196695] env[62510]: DEBUG oslo_vmware.api [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768928, 'name': Rename_Task, 'duration_secs': 0.886368} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1677.196973] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1677.197381] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-935da4eb-ea0e-49a7-8e60-912dfdaf5bf7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.205261] env[62510]: DEBUG oslo_vmware.api [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1677.205261] env[62510]: value = "task-1768933" [ 1677.205261] env[62510]: _type = "Task" [ 1677.205261] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.213540] env[62510]: DEBUG oslo_vmware.api [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768933, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.237576] env[62510]: DEBUG nova.compute.manager [req-d53f4e7b-3cf7-482c-bcb7-fe02c240984c req-36d1de06-ad37-4208-9161-0c929dfc8989 service nova] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Received event network-vif-plugged-5992dff8-0336-4d13-bbe8-2614b9dc96d5 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1677.237921] env[62510]: DEBUG oslo_concurrency.lockutils [req-d53f4e7b-3cf7-482c-bcb7-fe02c240984c req-36d1de06-ad37-4208-9161-0c929dfc8989 service nova] Acquiring lock "841460b0-d917-44ea-88c6-0e5a3022f658-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1677.238225] env[62510]: DEBUG oslo_concurrency.lockutils [req-d53f4e7b-3cf7-482c-bcb7-fe02c240984c req-36d1de06-ad37-4208-9161-0c929dfc8989 service nova] Lock "841460b0-d917-44ea-88c6-0e5a3022f658-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1677.238507] env[62510]: DEBUG oslo_concurrency.lockutils [req-d53f4e7b-3cf7-482c-bcb7-fe02c240984c req-36d1de06-ad37-4208-9161-0c929dfc8989 service nova] Lock "841460b0-d917-44ea-88c6-0e5a3022f658-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1677.238752] env[62510]: DEBUG nova.compute.manager [req-d53f4e7b-3cf7-482c-bcb7-fe02c240984c req-36d1de06-ad37-4208-9161-0c929dfc8989 service nova] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] No waiting events found dispatching network-vif-plugged-5992dff8-0336-4d13-bbe8-2614b9dc96d5 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1677.239047] env[62510]: WARNING nova.compute.manager [req-d53f4e7b-3cf7-482c-bcb7-fe02c240984c req-36d1de06-ad37-4208-9161-0c929dfc8989 service nova] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Received unexpected event network-vif-plugged-5992dff8-0336-4d13-bbe8-2614b9dc96d5 for instance with vm_state building and task_state spawning. [ 1677.259245] env[62510]: DEBUG oslo_vmware.api [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Task: {'id': task-1768931, 'name': Rename_Task, 'duration_secs': 0.14769} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1677.259605] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1677.259917] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-56a9cb11-43f9-4712-9ff0-7b39759872c4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.269135] env[62510]: DEBUG oslo_vmware.api [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Waiting for the task: (returnval){ [ 1677.269135] env[62510]: value = "task-1768934" [ 1677.269135] env[62510]: _type = "Task" [ 1677.269135] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.278476] env[62510]: DEBUG oslo_vmware.api [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Task: {'id': task-1768934, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.359472] env[62510]: DEBUG nova.network.neutron [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Successfully updated port: 5992dff8-0336-4d13-bbe8-2614b9dc96d5 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1677.636814] env[62510]: DEBUG oslo_vmware.api [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768932, 'name': ReconfigVM_Task, 'duration_secs': 0.417802} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1677.637502] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Reconfigured VM instance instance-0000002e to attach disk [datastore1] fae7e580-ab09-4fda-9cbe-0e066ddcb85c/fae7e580-ab09-4fda-9cbe-0e066ddcb85c.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1677.638052] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c3f6e42-5f4f-4d31-baa0-07bb0ee2583a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.665816] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73be488e-4f40-463f-ae6b-39fc66e7e03e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.702530] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40f9c90c-388c-4a96-adfb-ac106de4832d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.718238] env[62510]: DEBUG oslo_vmware.api [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1768933, 'name': PowerOnVM_Task, 'duration_secs': 0.505045} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1677.739724] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1677.743037] env[62510]: INFO nova.compute.manager [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Took 9.76 seconds to spawn the instance on the hypervisor. 
[ 1677.743037] env[62510]: DEBUG nova.compute.manager [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1677.743037] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e12525f-4aea-456a-9558-f157e69fe00b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.745205] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a96869be-819c-4b18-936a-0b6d036e972f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.756546] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1677.758348] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fb37b85b-482c-4194-8862-ff30b16926e0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.773447] env[62510]: DEBUG oslo_vmware.api [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Waiting for the task: (returnval){ [ 1677.773447] env[62510]: value = "task-1768935" [ 1677.773447] env[62510]: _type = "Task" [ 1677.773447] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.798413] env[62510]: DEBUG oslo_vmware.api [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768935, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.805814] env[62510]: DEBUG oslo_vmware.api [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Task: {'id': task-1768934, 'name': PowerOnVM_Task, 'duration_secs': 0.504923} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1677.810366] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1677.812614] env[62510]: INFO nova.compute.manager [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Took 7.18 seconds to spawn the instance on the hypervisor. 
[ 1677.812614] env[62510]: DEBUG nova.compute.manager [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1677.813629] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-886ef665-ab80-4918-aea0-f5f4660d8cf5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.865603] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "refresh_cache-841460b0-d917-44ea-88c6-0e5a3022f658" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1677.865822] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquired lock "refresh_cache-841460b0-d917-44ea-88c6-0e5a3022f658" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1677.866019] env[62510]: DEBUG nova.network.neutron [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1678.246566] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d2ae3e5-e9d4-4697-b253-99920b7a1be4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.256542] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0586ee02-27d5-4418-8576-895ffe6b7ba0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.304360] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a8a3a95-4b3b-474a-8d98-40fa4071a467 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.312033] env[62510]: INFO nova.compute.manager [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Took 56.72 seconds to build instance. [ 1678.318685] env[62510]: DEBUG oslo_vmware.api [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768935, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.322380] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48580145-3273-4df3-a0e7-8545fdc65624 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.347019] env[62510]: DEBUG nova.compute.provider_tree [None req-99984399-3df5-42b4-9b86-1a4b7879324f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1678.348466] env[62510]: INFO nova.compute.manager [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Took 55.34 seconds to build instance. [ 1678.435806] env[62510]: DEBUG nova.network.neutron [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1678.729810] env[62510]: DEBUG nova.network.neutron [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Updating instance_info_cache with network_info: [{"id": "5992dff8-0336-4d13-bbe8-2614b9dc96d5", "address": "fa:16:3e:47:48:b1", "network": {"id": "4c55d05c-607e-4972-898f-4aacefeddfdb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1391357384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bae4f0adee8c4c28add1849316448538", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dced2f3d-7fd3-4a42-836d-9f02dab4c949", "external-id": "nsx-vlan-transportzone-117", "segmentation_id": 117, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5992dff8-03", "ovs_interfaceid": "5992dff8-0336-4d13-bbe8-2614b9dc96d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1678.815189] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e51e6e3d-070e-4edc-8cdb-1db94c7c90f5 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "241d842d-3dd5-4ac2-a18a-12b9c9fbd340" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.239s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1678.816140] env[62510]: DEBUG oslo_vmware.api 
[None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768935, 'name': PowerOnVM_Task, 'duration_secs': 0.815335} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1678.816140] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1678.854439] env[62510]: DEBUG nova.scheduler.client.report [None req-99984399-3df5-42b4-9b86-1a4b7879324f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1678.856758] env[62510]: DEBUG oslo_concurrency.lockutils [None req-eb7903f6-436b-4f14-bf13-540403947028 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Lock "31fe5643-dece-484f-92d6-7c7cafbd51e4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.859s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1679.234281] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Releasing lock "refresh_cache-841460b0-d917-44ea-88c6-0e5a3022f658" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1679.234726] env[62510]: DEBUG nova.compute.manager [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Instance network_info: |[{"id": "5992dff8-0336-4d13-bbe8-2614b9dc96d5", "address": "fa:16:3e:47:48:b1", "network": {"id": "4c55d05c-607e-4972-898f-4aacefeddfdb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1391357384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bae4f0adee8c4c28add1849316448538", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dced2f3d-7fd3-4a42-836d-9f02dab4c949", "external-id": "nsx-vlan-transportzone-117", "segmentation_id": 117, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tap5992dff8-03", "ovs_interfaceid": "5992dff8-0336-4d13-bbe8-2614b9dc96d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1679.235260] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:47:48:b1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dced2f3d-7fd3-4a42-836d-9f02dab4c949', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5992dff8-0336-4d13-bbe8-2614b9dc96d5', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1679.243487] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Creating folder: Project (bae4f0adee8c4c28add1849316448538). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1679.243883] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6ef1fce2-20a3-4fc4-87e8-c60e4be39e22 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.258137] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Created folder: Project (bae4f0adee8c4c28add1849316448538) in parent group-v367197. [ 1679.258483] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Creating folder: Instances. Parent ref: group-v367372. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1679.258633] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8d713341-0337-4274-b2ca-23a7db1cd7ca {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.270451] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Created folder: Instances in parent group-v367372. [ 1679.270629] env[62510]: DEBUG oslo.service.loopingcall [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1679.270839] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1679.271068] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2633796f-2295-4d91-a1ea-06d7a5a70641 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.296030] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1679.296030] env[62510]: value = "task-1768938" [ 1679.296030] env[62510]: _type = "Task" [ 1679.296030] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.306871] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768938, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.358165] env[62510]: DEBUG oslo_concurrency.lockutils [None req-99984399-3df5-42b4-9b86-1a4b7879324f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.245s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1679.361425] env[62510]: DEBUG oslo_concurrency.lockutils [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.018s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1679.363239] env[62510]: INFO nova.compute.claims [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1679.396311] env[62510]: INFO nova.scheduler.client.report [None req-99984399-3df5-42b4-9b86-1a4b7879324f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Deleted allocations for instance 3df19233-2448-4030-ae1d-a4f98ccffba9 [ 1679.539260] env[62510]: DEBUG nova.compute.manager [req-5db09d57-7632-40d8-99ac-2296e758ffa4 req-73c4a404-2028-42e9-af5d-81070abe2861 service nova] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Received event network-changed-5992dff8-0336-4d13-bbe8-2614b9dc96d5 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1679.539673] env[62510]: DEBUG nova.compute.manager [req-5db09d57-7632-40d8-99ac-2296e758ffa4 req-73c4a404-2028-42e9-af5d-81070abe2861 service nova] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Refreshing instance network info cache due to event network-changed-5992dff8-0336-4d13-bbe8-2614b9dc96d5. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1679.539745] env[62510]: DEBUG oslo_concurrency.lockutils [req-5db09d57-7632-40d8-99ac-2296e758ffa4 req-73c4a404-2028-42e9-af5d-81070abe2861 service nova] Acquiring lock "refresh_cache-841460b0-d917-44ea-88c6-0e5a3022f658" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1679.539893] env[62510]: DEBUG oslo_concurrency.lockutils [req-5db09d57-7632-40d8-99ac-2296e758ffa4 req-73c4a404-2028-42e9-af5d-81070abe2861 service nova] Acquired lock "refresh_cache-841460b0-d917-44ea-88c6-0e5a3022f658" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1679.540431] env[62510]: DEBUG nova.network.neutron [req-5db09d57-7632-40d8-99ac-2296e758ffa4 req-73c4a404-2028-42e9-af5d-81070abe2861 service nova] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Refreshing network info cache for port 5992dff8-0336-4d13-bbe8-2614b9dc96d5 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1679.807854] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768938, 'name': CreateVM_Task, 'duration_secs': 0.429152} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.808044] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1679.808762] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1679.808936] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1679.809281] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1679.809549] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87a207e4-d6d0-41e7-ba4f-91c49da3431d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.815207] env[62510]: DEBUG oslo_vmware.api [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 1679.815207] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52b79e45-cce5-083b-59dd-662aa3e30ee7" [ 1679.815207] env[62510]: _type = "Task" [ 1679.815207] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.827295] env[62510]: DEBUG oslo_vmware.api [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52b79e45-cce5-083b-59dd-662aa3e30ee7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.833744] env[62510]: INFO nova.compute.manager [None req-c8274095-67e6-47a8-9784-fa21c3010f5d tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Updating instance to original state: 'active' [ 1679.908493] env[62510]: DEBUG oslo_concurrency.lockutils [None req-99984399-3df5-42b4-9b86-1a4b7879324f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Lock "3df19233-2448-4030-ae1d-a4f98ccffba9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.041s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1680.330162] env[62510]: DEBUG oslo_vmware.api [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52b79e45-cce5-083b-59dd-662aa3e30ee7, 'name': SearchDatastore_Task, 'duration_secs': 0.039311} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.330490] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1680.330725] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1680.330955] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1680.331118] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1680.331301] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1680.331574] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-08138d93-6580-440d-b459-89ee01845e17 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.341667] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1680.341869] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1680.344911] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-336b16bf-09b4-45a7-b8a3-ec863bc58ff9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.351720] env[62510]: DEBUG oslo_vmware.api [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 1680.351720] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52ac046b-b10c-5d51-6dda-1e18382395cb" [ 1680.351720] env[62510]: _type = "Task" [ 1680.351720] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.360365] env[62510]: DEBUG oslo_vmware.api [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52ac046b-b10c-5d51-6dda-1e18382395cb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.393121] env[62510]: DEBUG nova.network.neutron [req-5db09d57-7632-40d8-99ac-2296e758ffa4 req-73c4a404-2028-42e9-af5d-81070abe2861 service nova] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Updated VIF entry in instance network info cache for port 5992dff8-0336-4d13-bbe8-2614b9dc96d5. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1680.393550] env[62510]: DEBUG nova.network.neutron [req-5db09d57-7632-40d8-99ac-2296e758ffa4 req-73c4a404-2028-42e9-af5d-81070abe2861 service nova] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Updating instance_info_cache with network_info: [{"id": "5992dff8-0336-4d13-bbe8-2614b9dc96d5", "address": "fa:16:3e:47:48:b1", "network": {"id": "4c55d05c-607e-4972-898f-4aacefeddfdb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1391357384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bae4f0adee8c4c28add1849316448538", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dced2f3d-7fd3-4a42-836d-9f02dab4c949", "external-id": "nsx-vlan-transportzone-117", "segmentation_id": 117, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5992dff8-03", "ovs_interfaceid": "5992dff8-0336-4d13-bbe8-2614b9dc96d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1680.647829] env[62510]: DEBUG oslo_concurrency.lockutils [None req-65fee01b-a198-4390-b55a-0f19aaaa3cd1 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Acquiring lock "31fe5643-dece-484f-92d6-7c7cafbd51e4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1680.649027] env[62510]: DEBUG oslo_concurrency.lockutils [None req-65fee01b-a198-4390-b55a-0f19aaaa3cd1 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Lock "31fe5643-dece-484f-92d6-7c7cafbd51e4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1680.649465] env[62510]: DEBUG oslo_concurrency.lockutils [None req-65fee01b-a198-4390-b55a-0f19aaaa3cd1 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Acquiring lock "31fe5643-dece-484f-92d6-7c7cafbd51e4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1680.651676] env[62510]: DEBUG oslo_concurrency.lockutils [None req-65fee01b-a198-4390-b55a-0f19aaaa3cd1 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Lock "31fe5643-dece-484f-92d6-7c7cafbd51e4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1680.651676] env[62510]: DEBUG oslo_concurrency.lockutils [None req-65fee01b-a198-4390-b55a-0f19aaaa3cd1 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Lock "31fe5643-dece-484f-92d6-7c7cafbd51e4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1680.653801] env[62510]: INFO nova.compute.manager [None req-65fee01b-a198-4390-b55a-0f19aaaa3cd1 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Terminating instance [ 1680.858998] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85147cf1-0dbe-489f-990a-bd2917004b50 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.867375] env[62510]: DEBUG oslo_vmware.api [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52ac046b-b10c-5d51-6dda-1e18382395cb, 'name': SearchDatastore_Task, 'duration_secs': 0.016592} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.869291] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2f136f2-fa53-4bea-a36c-55117ca0c2d4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.874814] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92acea52-580e-4e9f-88e1-f1b56e0a74d6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.879799] env[62510]: DEBUG oslo_vmware.api [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 1680.879799] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5230f1ae-ea9d-9f58-aaa0-cc91befc0ae7" [ 1680.879799] env[62510]: _type = "Task" [ 1680.879799] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.914518] env[62510]: DEBUG oslo_concurrency.lockutils [req-5db09d57-7632-40d8-99ac-2296e758ffa4 req-73c4a404-2028-42e9-af5d-81070abe2861 service nova] Releasing lock "refresh_cache-841460b0-d917-44ea-88c6-0e5a3022f658" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1680.916203] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9c1d3db-4b6f-49d0-88a7-de78b64767d0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.923343] env[62510]: DEBUG oslo_vmware.api [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5230f1ae-ea9d-9f58-aaa0-cc91befc0ae7, 'name': SearchDatastore_Task, 'duration_secs': 0.013199} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.924080] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1680.924335] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 841460b0-d917-44ea-88c6-0e5a3022f658/841460b0-d917-44ea-88c6-0e5a3022f658.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1680.924605] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-260bda02-2390-431f-a16a-c0420cb1c014 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.930450] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89f3f837-6737-47a8-aa99-e6e9230fd87a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.936051] env[62510]: DEBUG oslo_vmware.api [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 1680.936051] env[62510]: value = "task-1768939" [ 1680.936051] env[62510]: _type = "Task" [ 1680.936051] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.950022] env[62510]: DEBUG nova.compute.provider_tree [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1680.956642] env[62510]: DEBUG oslo_vmware.api [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1768939, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.158091] env[62510]: DEBUG nova.compute.manager [None req-65fee01b-a198-4390-b55a-0f19aaaa3cd1 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1681.159541] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-65fee01b-a198-4390-b55a-0f19aaaa3cd1 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1681.161216] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36994ec5-645e-4afe-8ddb-e293860d2228 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.170469] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-65fee01b-a198-4390-b55a-0f19aaaa3cd1 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1681.170718] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5dd588d1-feb1-4299-a40b-88b4d2485ff3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.177947] env[62510]: DEBUG oslo_vmware.api [None req-65fee01b-a198-4390-b55a-0f19aaaa3cd1 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Waiting for the task: (returnval){ [ 1681.177947] env[62510]: value = "task-1768940" [ 1681.177947] env[62510]: _type = "Task" [ 1681.177947] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.186613] env[62510]: DEBUG oslo_vmware.api [None req-65fee01b-a198-4390-b55a-0f19aaaa3cd1 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Task: {'id': task-1768940, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.447385] env[62510]: DEBUG oslo_vmware.api [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1768939, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.451526] env[62510]: DEBUG nova.scheduler.client.report [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1681.624184] env[62510]: DEBUG nova.compute.manager [req-92a5bd54-28f2-47a8-9a43-9d9c8b1a1837 req-ae897523-7ce4-4909-80fe-f3f8c29f025b service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Received event network-changed-e0d0d69b-8e64-4722-b7d5-837e5c7482bc {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1681.624511] env[62510]: DEBUG nova.compute.manager [req-92a5bd54-28f2-47a8-9a43-9d9c8b1a1837 req-ae897523-7ce4-4909-80fe-f3f8c29f025b service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Refreshing instance network info cache due to event network-changed-e0d0d69b-8e64-4722-b7d5-837e5c7482bc. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1681.624719] env[62510]: DEBUG oslo_concurrency.lockutils [req-92a5bd54-28f2-47a8-9a43-9d9c8b1a1837 req-ae897523-7ce4-4909-80fe-f3f8c29f025b service nova] Acquiring lock "refresh_cache-241d842d-3dd5-4ac2-a18a-12b9c9fbd340" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1681.624872] env[62510]: DEBUG oslo_concurrency.lockutils [req-92a5bd54-28f2-47a8-9a43-9d9c8b1a1837 req-ae897523-7ce4-4909-80fe-f3f8c29f025b service nova] Acquired lock "refresh_cache-241d842d-3dd5-4ac2-a18a-12b9c9fbd340" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1681.625042] env[62510]: DEBUG nova.network.neutron [req-92a5bd54-28f2-47a8-9a43-9d9c8b1a1837 req-ae897523-7ce4-4909-80fe-f3f8c29f025b service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Refreshing network info cache for port e0d0d69b-8e64-4722-b7d5-837e5c7482bc {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1681.651010] env[62510]: DEBUG oslo_concurrency.lockutils [None req-34f455cb-1a0f-437b-a4e7-5ced4fbd34b6 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquiring lock "fae7e580-ab09-4fda-9cbe-0e066ddcb85c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1681.651504] env[62510]: DEBUG oslo_concurrency.lockutils [None req-34f455cb-1a0f-437b-a4e7-5ced4fbd34b6 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Lock "fae7e580-ab09-4fda-9cbe-0e066ddcb85c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1681.651885] env[62510]: DEBUG oslo_concurrency.lockutils [None req-34f455cb-1a0f-437b-a4e7-5ced4fbd34b6 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquiring lock "fae7e580-ab09-4fda-9cbe-0e066ddcb85c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1681.653987] env[62510]: DEBUG oslo_concurrency.lockutils [None req-34f455cb-1a0f-437b-a4e7-5ced4fbd34b6 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Lock "fae7e580-ab09-4fda-9cbe-0e066ddcb85c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1681.653987] env[62510]: DEBUG oslo_concurrency.lockutils [None req-34f455cb-1a0f-437b-a4e7-5ced4fbd34b6 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Lock "fae7e580-ab09-4fda-9cbe-0e066ddcb85c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1681.659667] env[62510]: INFO nova.compute.manager [None req-34f455cb-1a0f-437b-a4e7-5ced4fbd34b6 tempest-MigrationsAdminTest-857206338 
tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Terminating instance [ 1681.688940] env[62510]: DEBUG oslo_vmware.api [None req-65fee01b-a198-4390-b55a-0f19aaaa3cd1 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Task: {'id': task-1768940, 'name': PowerOffVM_Task, 'duration_secs': 0.219156} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1681.689369] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-65fee01b-a198-4390-b55a-0f19aaaa3cd1 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1681.689615] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-65fee01b-a198-4390-b55a-0f19aaaa3cd1 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1681.689919] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-78eafe74-21e2-4dc6-a275-fa19d82aeef2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.896198] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-65fee01b-a198-4390-b55a-0f19aaaa3cd1 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1681.896801] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-65fee01b-a198-4390-b55a-0f19aaaa3cd1 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1681.896801] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-65fee01b-a198-4390-b55a-0f19aaaa3cd1 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Deleting the datastore file [datastore1] 31fe5643-dece-484f-92d6-7c7cafbd51e4 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1681.897040] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2835dd79-d304-49a7-80e6-b3f2ef612b73 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.904668] env[62510]: DEBUG oslo_vmware.api [None req-65fee01b-a198-4390-b55a-0f19aaaa3cd1 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Waiting for the task: (returnval){ [ 1681.904668] env[62510]: value = "task-1768942" [ 1681.904668] env[62510]: _type = "Task" [ 1681.904668] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.918429] env[62510]: DEBUG oslo_vmware.api [None req-65fee01b-a198-4390-b55a-0f19aaaa3cd1 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Task: {'id': task-1768942, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.949879] env[62510]: DEBUG oslo_vmware.api [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1768939, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.956084] env[62510]: DEBUG oslo_concurrency.lockutils [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.595s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1681.956763] env[62510]: DEBUG nova.compute.manager [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1681.959526] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.714s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1681.960968] env[62510]: INFO nova.compute.claims [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1682.163608] env[62510]: DEBUG nova.compute.manager [None req-34f455cb-1a0f-437b-a4e7-5ced4fbd34b6 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1682.163843] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-34f455cb-1a0f-437b-a4e7-5ced4fbd34b6 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1682.164803] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad19cd91-1da8-4cb3-9a48-55f83933bbbd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.172924] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-34f455cb-1a0f-437b-a4e7-5ced4fbd34b6 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1682.175195] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9b1f07cc-62b5-4ab6-bdb9-1e591daf3955 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.182299] env[62510]: DEBUG oslo_vmware.api [None req-34f455cb-1a0f-437b-a4e7-5ced4fbd34b6 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Waiting for the task: (returnval){ [ 1682.182299] env[62510]: value = "task-1768943" [ 1682.182299] env[62510]: _type = "Task" [ 1682.182299] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.191079] env[62510]: DEBUG oslo_vmware.api [None req-34f455cb-1a0f-437b-a4e7-5ced4fbd34b6 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768943, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.421092] env[62510]: DEBUG oslo_vmware.api [None req-65fee01b-a198-4390-b55a-0f19aaaa3cd1 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Task: {'id': task-1768942, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.493878} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1682.421836] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-65fee01b-a198-4390-b55a-0f19aaaa3cd1 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1682.422161] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-65fee01b-a198-4390-b55a-0f19aaaa3cd1 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1682.422440] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-65fee01b-a198-4390-b55a-0f19aaaa3cd1 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1682.422772] env[62510]: INFO nova.compute.manager [None req-65fee01b-a198-4390-b55a-0f19aaaa3cd1 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Took 1.26 seconds to destroy the instance on the hypervisor. [ 1682.423732] env[62510]: DEBUG oslo.service.loopingcall [None req-65fee01b-a198-4390-b55a-0f19aaaa3cd1 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1682.423732] env[62510]: DEBUG nova.compute.manager [-] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1682.423732] env[62510]: DEBUG nova.network.neutron [-] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1682.449991] env[62510]: DEBUG oslo_vmware.api [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1768939, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.469532] env[62510]: DEBUG nova.compute.utils [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1682.477477] env[62510]: DEBUG nova.compute.manager [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1682.481022] env[62510]: DEBUG nova.network.neutron [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1682.540915] env[62510]: DEBUG nova.policy [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e0b465ab9caf4d989219f1fbbebd00ce', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd98518565b744451ba90ba301267213f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1682.621417] env[62510]: DEBUG nova.network.neutron [req-92a5bd54-28f2-47a8-9a43-9d9c8b1a1837 req-ae897523-7ce4-4909-80fe-f3f8c29f025b service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Updated VIF entry in instance network info cache for port e0d0d69b-8e64-4722-b7d5-837e5c7482bc. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1682.621960] env[62510]: DEBUG nova.network.neutron [req-92a5bd54-28f2-47a8-9a43-9d9c8b1a1837 req-ae897523-7ce4-4909-80fe-f3f8c29f025b service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Updating instance_info_cache with network_info: [{"id": "e0d0d69b-8e64-4722-b7d5-837e5c7482bc", "address": "fa:16:3e:2b:3e:7e", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0d0d69b-8e", "ovs_interfaceid": "e0d0d69b-8e64-4722-b7d5-837e5c7482bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1682.695643] env[62510]: DEBUG oslo_vmware.api [None req-34f455cb-1a0f-437b-a4e7-5ced4fbd34b6 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768943, 'name': PowerOffVM_Task, 'duration_secs': 0.299613} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1682.698484] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-34f455cb-1a0f-437b-a4e7-5ced4fbd34b6 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1682.698484] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-34f455cb-1a0f-437b-a4e7-5ced4fbd34b6 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1682.698484] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-34f680d1-f19b-4755-a3ba-83ac78b5cf58 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.791923] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-34f455cb-1a0f-437b-a4e7-5ced4fbd34b6 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1682.791923] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-34f455cb-1a0f-437b-a4e7-5ced4fbd34b6 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1682.791923] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-34f455cb-1a0f-437b-a4e7-5ced4fbd34b6 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Deleting the datastore file [datastore1] fae7e580-ab09-4fda-9cbe-0e066ddcb85c {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1682.792367] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9e31df35-66cb-4f7e-92c3-452784a3d000 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.801097] env[62510]: DEBUG oslo_vmware.api [None req-34f455cb-1a0f-437b-a4e7-5ced4fbd34b6 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Waiting for the task: (returnval){ [ 1682.801097] env[62510]: value = "task-1768945" [ 1682.801097] env[62510]: _type = "Task" [ 1682.801097] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.810577] env[62510]: DEBUG oslo_vmware.api [None req-34f455cb-1a0f-437b-a4e7-5ced4fbd34b6 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768945, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.819935] env[62510]: DEBUG nova.network.neutron [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Successfully created port: 825c17f2-947d-4f00-a176-ae94de2a927d {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1682.950454] env[62510]: DEBUG oslo_vmware.api [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1768939, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.778173} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1682.951750] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 841460b0-d917-44ea-88c6-0e5a3022f658/841460b0-d917-44ea-88c6-0e5a3022f658.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1682.952112] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1682.952622] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b34196cc-51f9-43b5-ae77-1ffabe1b565b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.960525] env[62510]: DEBUG oslo_vmware.api [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 1682.960525] env[62510]: value = "task-1768946" [ 1682.960525] env[62510]: _type = "Task" [ 1682.960525] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.972449] env[62510]: DEBUG oslo_vmware.api [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1768946, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.979291] env[62510]: DEBUG nova.compute.manager [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Start building block device mappings for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1683.124821] env[62510]: DEBUG oslo_concurrency.lockutils [req-92a5bd54-28f2-47a8-9a43-9d9c8b1a1837 req-ae897523-7ce4-4909-80fe-f3f8c29f025b service nova] Releasing lock "refresh_cache-241d842d-3dd5-4ac2-a18a-12b9c9fbd340" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1683.313433] env[62510]: DEBUG oslo_vmware.api [None req-34f455cb-1a0f-437b-a4e7-5ced4fbd34b6 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1768945, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152094} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1683.313695] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-34f455cb-1a0f-437b-a4e7-5ced4fbd34b6 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1683.313882] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-34f455cb-1a0f-437b-a4e7-5ced4fbd34b6 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1683.314071] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-34f455cb-1a0f-437b-a4e7-5ced4fbd34b6 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1683.314248] env[62510]: INFO nova.compute.manager [None req-34f455cb-1a0f-437b-a4e7-5ced4fbd34b6 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1683.314486] env[62510]: DEBUG oslo.service.loopingcall [None req-34f455cb-1a0f-437b-a4e7-5ced4fbd34b6 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1683.314678] env[62510]: DEBUG nova.compute.manager [-] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1683.314775] env[62510]: DEBUG nova.network.neutron [-] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1683.355117] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-219321ea-f481-44b9-8cdb-e83303aa019e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.363238] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36d4a4c6-3a7b-49ed-bf4b-41883b57ffd8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.394264] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2272ff51-2dbb-4d9c-b783-a439c76930b2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.402998] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e9420b2-664b-4422-a74f-052307119519 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.421370] env[62510]: DEBUG nova.network.neutron [-] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1683.423259] env[62510]: DEBUG nova.compute.provider_tree [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1683.473349] env[62510]: DEBUG oslo_vmware.api [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1768946, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066379} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1683.473349] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1683.474245] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c88bf693-b8c6-4469-bc84-c8c5e895dbef {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.506585] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] 841460b0-d917-44ea-88c6-0e5a3022f658/841460b0-d917-44ea-88c6-0e5a3022f658.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1683.509149] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1d7461a7-d13f-4839-ac2c-7cb61a15bed8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.545184] env[62510]: DEBUG oslo_vmware.api [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 1683.545184] env[62510]: value = "task-1768947" [ 1683.545184] env[62510]: _type = "Task" [ 1683.545184] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1683.556827] env[62510]: DEBUG oslo_vmware.api [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1768947, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.925693] env[62510]: INFO nova.compute.manager [-] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Took 1.50 seconds to deallocate network for instance. 
[ 1683.929255] env[62510]: DEBUG nova.scheduler.client.report [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1684.009546] env[62510]: DEBUG nova.compute.manager [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1684.039988] env[62510]: DEBUG nova.virt.hardware [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1684.039988] env[62510]: DEBUG nova.virt.hardware [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1684.039988] env[62510]: DEBUG nova.virt.hardware [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1684.040242] env[62510]: DEBUG nova.virt.hardware [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1684.040966] env[62510]: DEBUG nova.virt.hardware [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1684.040966] env[62510]: DEBUG nova.virt.hardware [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Chose sockets=0, cores=0, 
threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1684.040966] env[62510]: DEBUG nova.virt.hardware [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1684.040966] env[62510]: DEBUG nova.virt.hardware [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1684.041147] env[62510]: DEBUG nova.virt.hardware [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1684.041240] env[62510]: DEBUG nova.virt.hardware [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1684.041426] env[62510]: DEBUG nova.virt.hardware [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1684.042421] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b76145cd-0468-45bd-a9df-340edff51b12 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.052253] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-515fec76-25b3-4ee5-9ea6-250f2a802484 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.059381] env[62510]: DEBUG oslo_vmware.api [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1768947, 'name': ReconfigVM_Task, 'duration_secs': 0.306264} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1684.059916] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Reconfigured VM instance instance-00000044 to attach disk [datastore1] 841460b0-d917-44ea-88c6-0e5a3022f658/841460b0-d917-44ea-88c6-0e5a3022f658.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1684.062125] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9baee190-da1a-4bdb-807e-b24bb7bc48b7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.078860] env[62510]: DEBUG oslo_vmware.api [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 1684.078860] env[62510]: value = "task-1768948" [ 1684.078860] env[62510]: _type = "Task" [ 1684.078860] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1684.089626] env[62510]: DEBUG oslo_vmware.api [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1768948, 'name': Rename_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.102970] env[62510]: DEBUG nova.compute.manager [req-65504193-a440-4380-9465-e976065b3e04 req-6be571d2-c584-46fa-8692-de24855d4f41 service nova] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Received event network-vif-deleted-3ac61e77-4998-412c-a492-0ae2c6578684 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1684.364444] env[62510]: DEBUG nova.network.neutron [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Successfully updated port: 825c17f2-947d-4f00-a176-ae94de2a927d {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1684.439704] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.480s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1684.440274] env[62510]: DEBUG nova.compute.manager [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1684.443701] env[62510]: DEBUG oslo_concurrency.lockutils [None req-65fee01b-a198-4390-b55a-0f19aaaa3cd1 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1684.443992] env[62510]: DEBUG oslo_concurrency.lockutils [None req-31d48dca-21c2-4cef-b941-496c590f6956 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 39.669s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1684.469690] env[62510]: DEBUG nova.compute.manager [req-887f270b-5c8b-4b08-acb8-0638f1bced71 req-94657fdc-d383-465b-9083-68c02ebb35c6 service nova] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Received event network-vif-plugged-825c17f2-947d-4f00-a176-ae94de2a927d {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1684.469907] env[62510]: DEBUG oslo_concurrency.lockutils [req-887f270b-5c8b-4b08-acb8-0638f1bced71 req-94657fdc-d383-465b-9083-68c02ebb35c6 service nova] Acquiring lock "16b5d928-94fe-4fd5-9909-775c28d7edd2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1684.470172] env[62510]: DEBUG oslo_concurrency.lockutils [req-887f270b-5c8b-4b08-acb8-0638f1bced71 req-94657fdc-d383-465b-9083-68c02ebb35c6 service nova] Lock "16b5d928-94fe-4fd5-9909-775c28d7edd2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1684.471155] env[62510]: DEBUG oslo_concurrency.lockutils [req-887f270b-5c8b-4b08-acb8-0638f1bced71 req-94657fdc-d383-465b-9083-68c02ebb35c6 service nova] Lock "16b5d928-94fe-4fd5-9909-775c28d7edd2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1684.471155] env[62510]: DEBUG nova.compute.manager [req-887f270b-5c8b-4b08-acb8-0638f1bced71 req-94657fdc-d383-465b-9083-68c02ebb35c6 service nova] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] No waiting events found dispatching network-vif-plugged-825c17f2-947d-4f00-a176-ae94de2a927d {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1684.471155] env[62510]: WARNING nova.compute.manager [req-887f270b-5c8b-4b08-acb8-0638f1bced71 req-94657fdc-d383-465b-9083-68c02ebb35c6 service nova] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Received unexpected event network-vif-plugged-825c17f2-947d-4f00-a176-ae94de2a927d for instance with vm_state building and task_state spawning. 
[ 1684.571831] env[62510]: DEBUG nova.network.neutron [-] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1684.590102] env[62510]: DEBUG oslo_vmware.api [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1768948, 'name': Rename_Task, 'duration_secs': 0.167922} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1684.590408] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1684.590673] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4e2ce6cd-135f-48f1-a458-4565e6c52980 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.599131] env[62510]: DEBUG oslo_vmware.api [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 1684.599131] env[62510]: value = "task-1768949" [ 1684.599131] env[62510]: _type = "Task" [ 1684.599131] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1684.606944] env[62510]: DEBUG oslo_vmware.api [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1768949, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.867045] env[62510]: DEBUG oslo_concurrency.lockutils [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquiring lock "refresh_cache-16b5d928-94fe-4fd5-9909-775c28d7edd2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1684.867227] env[62510]: DEBUG oslo_concurrency.lockutils [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquired lock "refresh_cache-16b5d928-94fe-4fd5-9909-775c28d7edd2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1684.867605] env[62510]: DEBUG nova.network.neutron [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1684.952049] env[62510]: DEBUG nova.compute.utils [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1684.953400] env[62510]: DEBUG nova.compute.manager [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1684.953576] env[62510]: DEBUG nova.network.neutron [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1685.041991] env[62510]: DEBUG nova.policy [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8a84abd9ffda474fab9a663b57993943', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cefc145c7ef444f7a86b5716b3fbf072', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1685.074224] env[62510]: INFO nova.compute.manager [-] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Took 1.76 seconds to deallocate network for instance. [ 1685.116502] env[62510]: DEBUG oslo_vmware.api [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1768949, 'name': PowerOnVM_Task, 'duration_secs': 0.460522} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1685.117792] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1685.118030] env[62510]: INFO nova.compute.manager [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Took 8.62 seconds to spawn the instance on the hypervisor. [ 1685.118293] env[62510]: DEBUG nova.compute.manager [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1685.123774] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eadeb3e-06d5-4914-ba6d-0cf6fa7382e2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.337895] env[62510]: DEBUG nova.network.neutron [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Successfully created port: 2c2156f3-896a-4dbd-9693-22baadf98a7e {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1685.429210] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e653e2c-0cd0-4bf0-aec1-68bcc7c775de {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.432561] env[62510]: DEBUG nova.network.neutron [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1685.440221] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-928fb981-601e-4419-b3e1-ef826d89028d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.477724] env[62510]: DEBUG nova.compute.manager [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Start building block device mappings for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1685.485113] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aead0182-ac5f-4351-ab0a-e0f8e848125d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.501020] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dc205e3-2b60-4d28-af95-6813ba7d1c4e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.514271] env[62510]: DEBUG nova.compute.provider_tree [None req-31d48dca-21c2-4cef-b941-496c590f6956 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1685.584878] env[62510]: DEBUG oslo_concurrency.lockutils [None req-34f455cb-1a0f-437b-a4e7-5ced4fbd34b6 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1685.651372] env[62510]: INFO nova.compute.manager [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Took 55.69 seconds to build instance. [ 1685.726981] env[62510]: DEBUG nova.network.neutron [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Updating instance_info_cache with network_info: [{"id": "825c17f2-947d-4f00-a176-ae94de2a927d", "address": "fa:16:3e:8f:90:01", "network": {"id": "925f8c0b-2409-4eca-9a68-c5b357835972", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2008838096-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d98518565b744451ba90ba301267213f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4e52d8a-b086-4333-a5a1-938680a2d2bd", "external-id": "nsx-vlan-transportzone-973", "segmentation_id": 973, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap825c17f2-94", "ovs_interfaceid": "825c17f2-947d-4f00-a176-ae94de2a927d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1686.018510] env[62510]: DEBUG nova.scheduler.client.report [None req-31d48dca-21c2-4cef-b941-496c590f6956 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on 
inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1686.153276] env[62510]: DEBUG nova.compute.manager [req-5a54e244-0e17-44e8-b301-1884016d0c1a req-108fe56f-f602-4667-b31d-24324147e8a4 service nova] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Received event network-vif-deleted-21d515b9-b00f-45cc-9437-318ee6bba755 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1686.153593] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0aa53f50-fb68-4344-b80b-7b2805b7cf4a tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "841460b0-d917-44ea-88c6-0e5a3022f658" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.208s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1686.231036] env[62510]: DEBUG oslo_concurrency.lockutils [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Releasing lock "refresh_cache-16b5d928-94fe-4fd5-9909-775c28d7edd2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1686.231036] env[62510]: DEBUG nova.compute.manager [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Instance network_info: |[{"id": "825c17f2-947d-4f00-a176-ae94de2a927d", "address": "fa:16:3e:8f:90:01", "network": {"id": "925f8c0b-2409-4eca-9a68-c5b357835972", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2008838096-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d98518565b744451ba90ba301267213f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4e52d8a-b086-4333-a5a1-938680a2d2bd", "external-id": "nsx-vlan-transportzone-973", "segmentation_id": 973, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap825c17f2-94", "ovs_interfaceid": "825c17f2-947d-4f00-a176-ae94de2a927d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1686.231036] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8f:90:01', 'network_ref': {'type': 'OpaqueNetwork', 
'network-id': 'e4e52d8a-b086-4333-a5a1-938680a2d2bd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '825c17f2-947d-4f00-a176-ae94de2a927d', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1686.238540] env[62510]: DEBUG oslo.service.loopingcall [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1686.238759] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1686.238981] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-186c5360-4713-42a0-8495-a7b6b71656cb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.261724] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1686.261724] env[62510]: value = "task-1768950" [ 1686.261724] env[62510]: _type = "Task" [ 1686.261724] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1686.269275] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768950, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.491358] env[62510]: DEBUG nova.compute.manager [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1686.516776] env[62510]: DEBUG nova.virt.hardware [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1686.517110] env[62510]: DEBUG nova.virt.hardware [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1686.517396] env[62510]: DEBUG nova.virt.hardware [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1686.517544] env[62510]: DEBUG nova.virt.hardware [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1686.517732] env[62510]: DEBUG nova.virt.hardware [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1686.517904] env[62510]: DEBUG nova.virt.hardware [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1686.518179] env[62510]: DEBUG nova.virt.hardware [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1686.518373] env[62510]: DEBUG nova.virt.hardware [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1686.518567] env[62510]: DEBUG 
nova.virt.hardware [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1686.518774] env[62510]: DEBUG nova.virt.hardware [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1686.518985] env[62510]: DEBUG nova.virt.hardware [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1686.519885] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6782c5b3-f7d7-44e9-b96e-a91f1d0f945c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.532118] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dd79603-e024-4dc4-992d-09923be13998 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.663493] env[62510]: DEBUG nova.compute.manager [req-0c56dc0f-5cf8-470e-8fcb-6eb641ca114f req-261bc07e-ed45-4723-94d1-1134ffa8538d service nova] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Received event network-changed-825c17f2-947d-4f00-a176-ae94de2a927d {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1686.663849] env[62510]: DEBUG nova.compute.manager [req-0c56dc0f-5cf8-470e-8fcb-6eb641ca114f req-261bc07e-ed45-4723-94d1-1134ffa8538d service nova] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Refreshing instance network info cache due to event network-changed-825c17f2-947d-4f00-a176-ae94de2a927d. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1686.664051] env[62510]: DEBUG oslo_concurrency.lockutils [req-0c56dc0f-5cf8-470e-8fcb-6eb641ca114f req-261bc07e-ed45-4723-94d1-1134ffa8538d service nova] Acquiring lock "refresh_cache-16b5d928-94fe-4fd5-9909-775c28d7edd2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1686.664536] env[62510]: DEBUG oslo_concurrency.lockutils [req-0c56dc0f-5cf8-470e-8fcb-6eb641ca114f req-261bc07e-ed45-4723-94d1-1134ffa8538d service nova] Acquired lock "refresh_cache-16b5d928-94fe-4fd5-9909-775c28d7edd2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1686.664536] env[62510]: DEBUG nova.network.neutron [req-0c56dc0f-5cf8-470e-8fcb-6eb641ca114f req-261bc07e-ed45-4723-94d1-1134ffa8538d service nova] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Refreshing network info cache for port 825c17f2-947d-4f00-a176-ae94de2a927d {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1686.774441] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768950, 'name': CreateVM_Task, 'duration_secs': 0.448094} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1686.774661] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1686.775328] env[62510]: DEBUG oslo_concurrency.lockutils [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1686.775495] env[62510]: DEBUG oslo_concurrency.lockutils [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1686.775821] env[62510]: DEBUG oslo_concurrency.lockutils [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1686.776103] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f300070-3097-4ce8-9f02-140f8e03e04d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.782672] env[62510]: DEBUG oslo_vmware.api [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1686.782672] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]524e34fc-8739-b7c5-595a-c42a55daffde" [ 1686.782672] env[62510]: _type = "Task" [ 1686.782672] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1686.793536] env[62510]: DEBUG oslo_vmware.api [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]524e34fc-8739-b7c5-595a-c42a55daffde, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.908212] env[62510]: DEBUG nova.network.neutron [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Successfully updated port: 2c2156f3-896a-4dbd-9693-22baadf98a7e {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1687.030533] env[62510]: DEBUG oslo_concurrency.lockutils [None req-31d48dca-21c2-4cef-b941-496c590f6956 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.586s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1687.035347] env[62510]: DEBUG oslo_concurrency.lockutils [None req-aa702396-c40d-41da-9c04-d24ae2af5312 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.435s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1687.036688] env[62510]: DEBUG nova.objects.instance [None req-aa702396-c40d-41da-9c04-d24ae2af5312 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lazy-loading 'resources' on Instance uuid d3e25d50-f315-439b-9e9f-8e454a0631d4 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1687.293669] env[62510]: DEBUG oslo_vmware.api [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]524e34fc-8739-b7c5-595a-c42a55daffde, 'name': SearchDatastore_Task, 'duration_secs': 0.010317} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1687.296590] env[62510]: DEBUG oslo_concurrency.lockutils [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1687.296841] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1687.297085] env[62510]: DEBUG oslo_concurrency.lockutils [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1687.297276] env[62510]: DEBUG oslo_concurrency.lockutils [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1687.297423] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1687.297717] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6646ba3f-bea7-43c6-91b4-731d4f15049e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.308261] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1687.308475] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1687.309268] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f13c87d9-5c4c-4578-af07-2ebb0b6fd5cb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.315033] env[62510]: DEBUG oslo_vmware.api [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1687.315033] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]524f5516-b2d3-7ff3-ee35-af95d6e68d5e" [ 1687.315033] env[62510]: _type = "Task" [ 1687.315033] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.323757] env[62510]: DEBUG oslo_vmware.api [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]524f5516-b2d3-7ff3-ee35-af95d6e68d5e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.415082] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Acquiring lock "refresh_cache-c829d602-97bc-4ec8-9090-c63bed04ac79" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1687.415247] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Acquired lock "refresh_cache-c829d602-97bc-4ec8-9090-c63bed04ac79" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1687.415417] env[62510]: DEBUG nova.network.neutron [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1687.433746] env[62510]: DEBUG nova.network.neutron [req-0c56dc0f-5cf8-470e-8fcb-6eb641ca114f req-261bc07e-ed45-4723-94d1-1134ffa8538d service nova] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Updated VIF entry in instance network info cache for port 825c17f2-947d-4f00-a176-ae94de2a927d. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1687.433746] env[62510]: DEBUG nova.network.neutron [req-0c56dc0f-5cf8-470e-8fcb-6eb641ca114f req-261bc07e-ed45-4723-94d1-1134ffa8538d service nova] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Updating instance_info_cache with network_info: [{"id": "825c17f2-947d-4f00-a176-ae94de2a927d", "address": "fa:16:3e:8f:90:01", "network": {"id": "925f8c0b-2409-4eca-9a68-c5b357835972", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2008838096-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d98518565b744451ba90ba301267213f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4e52d8a-b086-4333-a5a1-938680a2d2bd", "external-id": "nsx-vlan-transportzone-973", "segmentation_id": 973, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap825c17f2-94", "ovs_interfaceid": "825c17f2-947d-4f00-a176-ae94de2a927d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1687.595340] env[62510]: INFO nova.scheduler.client.report [None req-31d48dca-21c2-4cef-b941-496c590f6956 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Deleted allocation for migration 09ab0c5c-8b35-4fbb-82b3-775e36c61415 [ 1687.826230] env[62510]: DEBUG oslo_vmware.api [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]524f5516-b2d3-7ff3-ee35-af95d6e68d5e, 'name': SearchDatastore_Task, 'duration_secs': 0.010771} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1687.827115] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd50c138-83ee-418c-b1a8-9916f8c23014 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.833530] env[62510]: DEBUG oslo_vmware.api [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1687.833530] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5242660b-da3e-56f6-72e1-c7abb70687b7" [ 1687.833530] env[62510]: _type = "Task" [ 1687.833530] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.845886] env[62510]: DEBUG oslo_vmware.api [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5242660b-da3e-56f6-72e1-c7abb70687b7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.934871] env[62510]: DEBUG oslo_concurrency.lockutils [req-0c56dc0f-5cf8-470e-8fcb-6eb641ca114f req-261bc07e-ed45-4723-94d1-1134ffa8538d service nova] Releasing lock "refresh_cache-16b5d928-94fe-4fd5-9909-775c28d7edd2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1687.961412] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c8bece5-0723-458b-8f6d-76008a97a398 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.969641] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-869df0b0-a45f-4803-87ad-4d10b3683444 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.003319] env[62510]: DEBUG nova.network.neutron [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1688.005828] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1f9f787-0cdb-441e-9545-3d48416b70e9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.014939] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6551354-7c53-4c6e-af77-f66ead7cfcf7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.031410] env[62510]: DEBUG nova.compute.provider_tree [None req-aa702396-c40d-41da-9c04-d24ae2af5312 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1688.104399] env[62510]: DEBUG oslo_concurrency.lockutils [None req-31d48dca-21c2-4cef-b941-496c590f6956 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "350d5f83-d9ce-4997-bf57-70c4a4e22ba0" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 46.897s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1688.240266] env[62510]: DEBUG nova.network.neutron [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Updating instance_info_cache with network_info: [{"id": "2c2156f3-896a-4dbd-9693-22baadf98a7e", "address": "fa:16:3e:4b:d5:b7", "network": {"id": "7d8b2779-800e-4f1f-8b3f-a9b04ab1faf1", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-459294831-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cefc145c7ef444f7a86b5716b3fbf072", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d94740a-bce8-4103-8ecf-230d02ec0a44", "external-id": "nsx-vlan-transportzone-149", "segmentation_id": 149, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c2156f3-89", "ovs_interfaceid": "2c2156f3-896a-4dbd-9693-22baadf98a7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1688.349803] env[62510]: DEBUG oslo_vmware.api [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5242660b-da3e-56f6-72e1-c7abb70687b7, 'name': SearchDatastore_Task, 'duration_secs': 0.011445} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1688.350129] env[62510]: DEBUG oslo_concurrency.lockutils [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1688.350400] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 16b5d928-94fe-4fd5-9909-775c28d7edd2/16b5d928-94fe-4fd5-9909-775c28d7edd2.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1688.350675] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0ef94783-6378-4a8c-86c4-8165fbac0ed9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.360923] env[62510]: DEBUG oslo_vmware.api [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1688.360923] env[62510]: value = "task-1768951" [ 1688.360923] env[62510]: _type = "Task" [ 1688.360923] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.368707] env[62510]: DEBUG oslo_vmware.api [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768951, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.533284] env[62510]: DEBUG nova.scheduler.client.report [None req-aa702396-c40d-41da-9c04-d24ae2af5312 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1688.744929] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Releasing lock "refresh_cache-c829d602-97bc-4ec8-9090-c63bed04ac79" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1688.745302] env[62510]: DEBUG nova.compute.manager [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Instance network_info: |[{"id": "2c2156f3-896a-4dbd-9693-22baadf98a7e", "address": "fa:16:3e:4b:d5:b7", "network": {"id": "7d8b2779-800e-4f1f-8b3f-a9b04ab1faf1", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-459294831-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cefc145c7ef444f7a86b5716b3fbf072", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d94740a-bce8-4103-8ecf-230d02ec0a44", "external-id": "nsx-vlan-transportzone-149", "segmentation_id": 149, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c2156f3-89", "ovs_interfaceid": "2c2156f3-896a-4dbd-9693-22baadf98a7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1688.745737] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4b:d5:b7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d94740a-bce8-4103-8ecf-230d02ec0a44', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2c2156f3-896a-4dbd-9693-22baadf98a7e', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1688.753746] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 
tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Creating folder: Project (cefc145c7ef444f7a86b5716b3fbf072). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1688.754072] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-056d5ae5-bd1c-414e-8cf1-1834d2b81a51 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.766336] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Created folder: Project (cefc145c7ef444f7a86b5716b3fbf072) in parent group-v367197. [ 1688.766515] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Creating folder: Instances. Parent ref: group-v367376. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1688.766759] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b25f9c41-3ac8-49b2-9c1a-9d2a8dec0297 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.776843] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Created folder: Instances in parent group-v367376. [ 1688.777109] env[62510]: DEBUG oslo.service.loopingcall [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1688.777351] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1688.777569] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c58ee916-a6d7-4b0a-9df4-60320d8455f5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.795764] env[62510]: DEBUG nova.compute.manager [req-3d4fbbec-c5eb-4c3a-be4e-cccb60953a1f req-d05893d7-626e-4b77-94ff-17b53e5d3047 service nova] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Received event network-changed-5992dff8-0336-4d13-bbe8-2614b9dc96d5 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1688.795957] env[62510]: DEBUG nova.compute.manager [req-3d4fbbec-c5eb-4c3a-be4e-cccb60953a1f req-d05893d7-626e-4b77-94ff-17b53e5d3047 service nova] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Refreshing instance network info cache due to event network-changed-5992dff8-0336-4d13-bbe8-2614b9dc96d5. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1688.796222] env[62510]: DEBUG oslo_concurrency.lockutils [req-3d4fbbec-c5eb-4c3a-be4e-cccb60953a1f req-d05893d7-626e-4b77-94ff-17b53e5d3047 service nova] Acquiring lock "refresh_cache-841460b0-d917-44ea-88c6-0e5a3022f658" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1688.796381] env[62510]: DEBUG oslo_concurrency.lockutils [req-3d4fbbec-c5eb-4c3a-be4e-cccb60953a1f req-d05893d7-626e-4b77-94ff-17b53e5d3047 service nova] Acquired lock "refresh_cache-841460b0-d917-44ea-88c6-0e5a3022f658" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1688.796546] env[62510]: DEBUG nova.network.neutron [req-3d4fbbec-c5eb-4c3a-be4e-cccb60953a1f req-d05893d7-626e-4b77-94ff-17b53e5d3047 service nova] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Refreshing network info cache for port 5992dff8-0336-4d13-bbe8-2614b9dc96d5 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1688.803813] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1688.803813] env[62510]: value = "task-1768954" [ 1688.803813] env[62510]: _type = "Task" [ 1688.803813] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.813153] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768954, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.869830] env[62510]: DEBUG oslo_vmware.api [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768951, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.038993] env[62510]: DEBUG oslo_concurrency.lockutils [None req-aa702396-c40d-41da-9c04-d24ae2af5312 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.004s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1689.041717] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 42.163s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1689.043362] env[62510]: INFO nova.compute.claims [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1689.060404] env[62510]: INFO nova.scheduler.client.report [None req-aa702396-c40d-41da-9c04-d24ae2af5312 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Deleted allocations for instance d3e25d50-f315-439b-9e9f-8e454a0631d4 [ 1689.314623] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768954, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.370948] env[62510]: DEBUG oslo_vmware.api [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768951, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.392605] env[62510]: DEBUG oslo_concurrency.lockutils [None req-21f2fde3-b239-47e7-b8bb-6f25e275e184 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "350d5f83-d9ce-4997-bf57-70c4a4e22ba0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1689.392872] env[62510]: DEBUG oslo_concurrency.lockutils [None req-21f2fde3-b239-47e7-b8bb-6f25e275e184 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "350d5f83-d9ce-4997-bf57-70c4a4e22ba0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1689.393109] env[62510]: DEBUG oslo_concurrency.lockutils [None req-21f2fde3-b239-47e7-b8bb-6f25e275e184 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "350d5f83-d9ce-4997-bf57-70c4a4e22ba0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1689.393307] env[62510]: DEBUG oslo_concurrency.lockutils [None req-21f2fde3-b239-47e7-b8bb-6f25e275e184 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "350d5f83-d9ce-4997-bf57-70c4a4e22ba0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1689.393472] env[62510]: DEBUG oslo_concurrency.lockutils [None req-21f2fde3-b239-47e7-b8bb-6f25e275e184 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "350d5f83-d9ce-4997-bf57-70c4a4e22ba0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1689.400343] env[62510]: INFO nova.compute.manager [None req-21f2fde3-b239-47e7-b8bb-6f25e275e184 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Terminating instance [ 1689.558604] env[62510]: DEBUG nova.network.neutron [req-3d4fbbec-c5eb-4c3a-be4e-cccb60953a1f req-d05893d7-626e-4b77-94ff-17b53e5d3047 service nova] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Updated VIF entry in instance network info cache for port 5992dff8-0336-4d13-bbe8-2614b9dc96d5. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1689.559197] env[62510]: DEBUG nova.network.neutron [req-3d4fbbec-c5eb-4c3a-be4e-cccb60953a1f req-d05893d7-626e-4b77-94ff-17b53e5d3047 service nova] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Updating instance_info_cache with network_info: [{"id": "5992dff8-0336-4d13-bbe8-2614b9dc96d5", "address": "fa:16:3e:47:48:b1", "network": {"id": "4c55d05c-607e-4972-898f-4aacefeddfdb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1391357384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bae4f0adee8c4c28add1849316448538", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dced2f3d-7fd3-4a42-836d-9f02dab4c949", "external-id": "nsx-vlan-transportzone-117", "segmentation_id": 117, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5992dff8-03", "ovs_interfaceid": "5992dff8-0336-4d13-bbe8-2614b9dc96d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1689.567628] env[62510]: DEBUG oslo_concurrency.lockutils [None req-aa702396-c40d-41da-9c04-d24ae2af5312 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "d3e25d50-f315-439b-9e9f-8e454a0631d4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 50.970s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1689.814807] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768954, 'name': CreateVM_Task, 'duration_secs': 0.58352} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.814985] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1689.815695] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1689.815887] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1689.816349] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1689.816598] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5cfd2181-217f-4768-9a43-a7c417d55047 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.821405] env[62510]: DEBUG oslo_vmware.api [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Waiting for the task: (returnval){ [ 1689.821405] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]527e2ea0-c3a6-e57e-b523-32f7cd82e2ca" [ 1689.821405] env[62510]: _type = "Task" [ 1689.821405] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.829732] env[62510]: DEBUG oslo_vmware.api [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]527e2ea0-c3a6-e57e-b523-32f7cd82e2ca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.870577] env[62510]: DEBUG oslo_vmware.api [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768951, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.899331] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "77f485ae-9c4c-424e-8bac-6d023e428767" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1689.899652] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "77f485ae-9c4c-424e-8bac-6d023e428767" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1689.905657] env[62510]: DEBUG nova.compute.manager [None req-21f2fde3-b239-47e7-b8bb-6f25e275e184 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1689.905867] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-21f2fde3-b239-47e7-b8bb-6f25e275e184 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1689.906751] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dd244a5-f266-40f5-8878-f7ed2c73b604 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.915367] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-21f2fde3-b239-47e7-b8bb-6f25e275e184 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1689.915602] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ac1723e4-6a89-445d-9995-95dab6195897 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.923071] env[62510]: DEBUG oslo_vmware.api [None req-21f2fde3-b239-47e7-b8bb-6f25e275e184 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1689.923071] env[62510]: value = "task-1768955" [ 1689.923071] env[62510]: _type = "Task" [ 1689.923071] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.931197] env[62510]: DEBUG oslo_vmware.api [None req-21f2fde3-b239-47e7-b8bb-6f25e275e184 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768955, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.062444] env[62510]: DEBUG oslo_concurrency.lockutils [req-3d4fbbec-c5eb-4c3a-be4e-cccb60953a1f req-d05893d7-626e-4b77-94ff-17b53e5d3047 service nova] Releasing lock "refresh_cache-841460b0-d917-44ea-88c6-0e5a3022f658" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1690.062708] env[62510]: DEBUG nova.compute.manager [req-3d4fbbec-c5eb-4c3a-be4e-cccb60953a1f req-d05893d7-626e-4b77-94ff-17b53e5d3047 service nova] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Received event network-vif-plugged-2c2156f3-896a-4dbd-9693-22baadf98a7e {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1690.062917] env[62510]: DEBUG oslo_concurrency.lockutils [req-3d4fbbec-c5eb-4c3a-be4e-cccb60953a1f req-d05893d7-626e-4b77-94ff-17b53e5d3047 service nova] Acquiring lock "c829d602-97bc-4ec8-9090-c63bed04ac79-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1690.063180] env[62510]: DEBUG oslo_concurrency.lockutils [req-3d4fbbec-c5eb-4c3a-be4e-cccb60953a1f req-d05893d7-626e-4b77-94ff-17b53e5d3047 service nova] Lock "c829d602-97bc-4ec8-9090-c63bed04ac79-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1690.063363] env[62510]: DEBUG oslo_concurrency.lockutils [req-3d4fbbec-c5eb-4c3a-be4e-cccb60953a1f req-d05893d7-626e-4b77-94ff-17b53e5d3047 service nova] Lock "c829d602-97bc-4ec8-9090-c63bed04ac79-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1690.063555] env[62510]: DEBUG nova.compute.manager [req-3d4fbbec-c5eb-4c3a-be4e-cccb60953a1f req-d05893d7-626e-4b77-94ff-17b53e5d3047 service nova] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] No waiting events found dispatching network-vif-plugged-2c2156f3-896a-4dbd-9693-22baadf98a7e {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1690.063747] env[62510]: WARNING nova.compute.manager [req-3d4fbbec-c5eb-4c3a-be4e-cccb60953a1f req-d05893d7-626e-4b77-94ff-17b53e5d3047 service nova] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Received unexpected event network-vif-plugged-2c2156f3-896a-4dbd-9693-22baadf98a7e for instance with vm_state building and task_state spawning. [ 1690.063925] env[62510]: DEBUG nova.compute.manager [req-3d4fbbec-c5eb-4c3a-be4e-cccb60953a1f req-d05893d7-626e-4b77-94ff-17b53e5d3047 service nova] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Received event network-changed-2c2156f3-896a-4dbd-9693-22baadf98a7e {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1690.064120] env[62510]: DEBUG nova.compute.manager [req-3d4fbbec-c5eb-4c3a-be4e-cccb60953a1f req-d05893d7-626e-4b77-94ff-17b53e5d3047 service nova] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Refreshing instance network info cache due to event network-changed-2c2156f3-896a-4dbd-9693-22baadf98a7e. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1690.064406] env[62510]: DEBUG oslo_concurrency.lockutils [req-3d4fbbec-c5eb-4c3a-be4e-cccb60953a1f req-d05893d7-626e-4b77-94ff-17b53e5d3047 service nova] Acquiring lock "refresh_cache-c829d602-97bc-4ec8-9090-c63bed04ac79" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1690.064552] env[62510]: DEBUG oslo_concurrency.lockutils [req-3d4fbbec-c5eb-4c3a-be4e-cccb60953a1f req-d05893d7-626e-4b77-94ff-17b53e5d3047 service nova] Acquired lock "refresh_cache-c829d602-97bc-4ec8-9090-c63bed04ac79" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1690.064713] env[62510]: DEBUG nova.network.neutron [req-3d4fbbec-c5eb-4c3a-be4e-cccb60953a1f req-d05893d7-626e-4b77-94ff-17b53e5d3047 service nova] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Refreshing network info cache for port 2c2156f3-896a-4dbd-9693-22baadf98a7e {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1690.334211] env[62510]: DEBUG oslo_vmware.api [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]527e2ea0-c3a6-e57e-b523-32f7cd82e2ca, 'name': SearchDatastore_Task, 'duration_secs': 0.010961} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1690.334536] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1690.334776] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1690.335008] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1690.335163] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1690.335346] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Creating directory with path [datastore1] devstack-image-cache_base 
{{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1690.335608] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7310291f-f854-46ae-aa7a-2fbd935038f4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.353029] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1690.353235] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1690.353971] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4fd7bf38-5058-4d4b-ba6e-f2b716d973a6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.359654] env[62510]: DEBUG oslo_vmware.api [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Waiting for the task: (returnval){ [ 1690.359654] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]525c4bd8-8f84-5a4d-e9e3-9520d5c149f8" [ 1690.359654] env[62510]: _type = "Task" [ 1690.359654] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.373083] env[62510]: DEBUG oslo_vmware.api [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]525c4bd8-8f84-5a4d-e9e3-9520d5c149f8, 'name': SearchDatastore_Task, 'duration_secs': 0.011124} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1690.377049] env[62510]: DEBUG oslo_vmware.api [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768951, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.378926] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3cdde794-d10e-4bd5-aa51-be1a7de050e2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.385019] env[62510]: DEBUG oslo_vmware.api [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Waiting for the task: (returnval){ [ 1690.385019] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5256d0a8-776f-28d5-49fe-722f0be5eefd" [ 1690.385019] env[62510]: _type = "Task" [ 1690.385019] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.393177] env[62510]: DEBUG oslo_vmware.api [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5256d0a8-776f-28d5-49fe-722f0be5eefd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.402047] env[62510]: DEBUG nova.compute.manager [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1690.429133] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f0ed1af-9c89-4642-92e8-15d0eaac5623 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.435288] env[62510]: DEBUG oslo_vmware.api [None req-21f2fde3-b239-47e7-b8bb-6f25e275e184 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768955, 'name': PowerOffVM_Task, 'duration_secs': 0.224509} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1690.436159] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-21f2fde3-b239-47e7-b8bb-6f25e275e184 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1690.436159] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-21f2fde3-b239-47e7-b8bb-6f25e275e184 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1690.436496] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-67623e5a-2fca-4b3e-9a49-ff58bdeb7bbf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.441643] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c55f52c-39b1-4970-8f49-824ac935f5c6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.473337] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-212f3bd2-6897-4f1d-9e20-6af4e9ca3976 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.481832] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e2aeeae-8f9e-423d-8534-af752aaed756 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.497604] env[62510]: DEBUG nova.compute.provider_tree [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 
tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1690.777307] env[62510]: DEBUG nova.network.neutron [req-3d4fbbec-c5eb-4c3a-be4e-cccb60953a1f req-d05893d7-626e-4b77-94ff-17b53e5d3047 service nova] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Updated VIF entry in instance network info cache for port 2c2156f3-896a-4dbd-9693-22baadf98a7e. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1690.778446] env[62510]: DEBUG nova.network.neutron [req-3d4fbbec-c5eb-4c3a-be4e-cccb60953a1f req-d05893d7-626e-4b77-94ff-17b53e5d3047 service nova] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Updating instance_info_cache with network_info: [{"id": "2c2156f3-896a-4dbd-9693-22baadf98a7e", "address": "fa:16:3e:4b:d5:b7", "network": {"id": "7d8b2779-800e-4f1f-8b3f-a9b04ab1faf1", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-459294831-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cefc145c7ef444f7a86b5716b3fbf072", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d94740a-bce8-4103-8ecf-230d02ec0a44", "external-id": "nsx-vlan-transportzone-149", "segmentation_id": 149, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c2156f3-89", "ovs_interfaceid": "2c2156f3-896a-4dbd-9693-22baadf98a7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1690.872022] env[62510]: DEBUG oslo_vmware.api [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768951, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.426329} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1690.872022] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 16b5d928-94fe-4fd5-9909-775c28d7edd2/16b5d928-94fe-4fd5-9909-775c28d7edd2.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1690.872390] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1690.872618] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c508f9a8-63ae-456f-8fbd-a83ae2b6007b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.879548] env[62510]: DEBUG oslo_vmware.api [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1690.879548] env[62510]: value = "task-1768957" [ 1690.879548] env[62510]: _type = "Task" [ 1690.879548] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.891396] env[62510]: DEBUG oslo_vmware.api [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768957, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.901315] env[62510]: DEBUG oslo_vmware.api [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5256d0a8-776f-28d5-49fe-722f0be5eefd, 'name': SearchDatastore_Task, 'duration_secs': 0.011244} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1690.901712] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1690.902128] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] c829d602-97bc-4ec8-9090-c63bed04ac79/c829d602-97bc-4ec8-9090-c63bed04ac79.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1690.902498] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-41ceec14-3df0-4a86-9955-fd64c321cafe {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.913419] env[62510]: DEBUG oslo_vmware.api [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Waiting for the task: (returnval){ [ 1690.913419] env[62510]: value = "task-1768958" [ 1690.913419] env[62510]: _type = "Task" [ 1690.913419] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.921809] env[62510]: DEBUG oslo_vmware.api [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Task: {'id': task-1768958, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.922732] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1691.000652] env[62510]: DEBUG nova.scheduler.client.report [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1691.281664] env[62510]: DEBUG oslo_concurrency.lockutils [req-3d4fbbec-c5eb-4c3a-be4e-cccb60953a1f req-d05893d7-626e-4b77-94ff-17b53e5d3047 service nova] Releasing lock "refresh_cache-c829d602-97bc-4ec8-9090-c63bed04ac79" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1691.392443] env[62510]: DEBUG oslo_vmware.api [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768957, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084675} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.392994] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1691.394236] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7108e017-0679-47c5-84a1-71410bfece42 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.427023] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] 16b5d928-94fe-4fd5-9909-775c28d7edd2/16b5d928-94fe-4fd5-9909-775c28d7edd2.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1691.427023] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-747eb0e7-8d1f-4645-bf29-c8854cf2b1fa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.451380] env[62510]: DEBUG oslo_vmware.api [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Task: {'id': task-1768958, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.453509] env[62510]: DEBUG oslo_vmware.api [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1691.453509] env[62510]: value = "task-1768959" [ 1691.453509] env[62510]: _type = "Task" [ 1691.453509] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.462685] env[62510]: DEBUG oslo_vmware.api [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768959, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.505856] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.464s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1691.506528] env[62510]: DEBUG nova.compute.manager [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1691.509440] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 39.978s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1691.509713] env[62510]: DEBUG nova.objects.instance [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62510) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1691.935919] env[62510]: DEBUG oslo_vmware.api [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Task: {'id': task-1768958, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.560142} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.936271] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] c829d602-97bc-4ec8-9090-c63bed04ac79/c829d602-97bc-4ec8-9090-c63bed04ac79.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1691.936460] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1691.936747] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a1288c63-78f8-4c24-9d33-e9fddd88b5cf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.946027] env[62510]: DEBUG oslo_vmware.api [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Waiting for the task: (returnval){ [ 1691.946027] env[62510]: value = "task-1768960" [ 1691.946027] env[62510]: _type = "Task" [ 1691.946027] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.958088] env[62510]: DEBUG oslo_vmware.api [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Task: {'id': task-1768960, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.968358] env[62510]: DEBUG oslo_vmware.api [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768959, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.015053] env[62510]: DEBUG nova.compute.utils [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1692.020159] env[62510]: DEBUG nova.compute.manager [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1692.020159] env[62510]: DEBUG nova.network.neutron [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] [instance: c8e69231-2786-47ac-9a44-c194088b8079] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1692.063983] env[62510]: DEBUG nova.policy [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6ddc93f8d8ec49bcbb27dec3b8a699b1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6cfd45be082f40eb9ccfd136feeabe77', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1692.321100] env[62510]: DEBUG nova.network.neutron [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Successfully created port: db18d253-3621-4972-a6c9-1f82a650ccb0 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1692.456296] env[62510]: DEBUG oslo_vmware.api [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Task: {'id': task-1768960, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067882} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.456715] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1692.457556] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d2a7af9-fe1f-48a1-be5a-be2073d2a070 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.468238] env[62510]: DEBUG oslo_vmware.api [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768959, 'name': ReconfigVM_Task, 'duration_secs': 0.539381} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.477570] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Reconfigured VM instance instance-00000045 to attach disk [datastore1] 16b5d928-94fe-4fd5-9909-775c28d7edd2/16b5d928-94fe-4fd5-9909-775c28d7edd2.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1692.487133] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] c829d602-97bc-4ec8-9090-c63bed04ac79/c829d602-97bc-4ec8-9090-c63bed04ac79.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1692.488120] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4716dd24-41e3-4e75-bc7f-f39c76aa5be3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.489832] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bf936974-ac93-4964-a88a-63e5148a7f76 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.514602] env[62510]: DEBUG oslo_vmware.api [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1692.514602] env[62510]: value = "task-1768961" [ 1692.514602] env[62510]: _type = "Task" [ 1692.514602] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.514983] env[62510]: DEBUG oslo_vmware.api [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Waiting for the task: (returnval){ [ 1692.514983] env[62510]: value = "task-1768962" [ 1692.514983] env[62510]: _type = "Task" [ 1692.514983] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.521205] env[62510]: DEBUG nova.compute.manager [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1692.525068] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d6a7cbd0-aeaa-4bfc-8bc4-42d11109ef76 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1692.526751] env[62510]: DEBUG oslo_concurrency.lockutils [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 39.958s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1692.526945] env[62510]: DEBUG nova.objects.instance [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62510) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1692.536166] env[62510]: DEBUG oslo_vmware.api [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768961, 'name': Rename_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.536444] env[62510]: DEBUG oslo_vmware.api [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Task: {'id': task-1768962, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.924876] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-21f2fde3-b239-47e7-b8bb-6f25e275e184 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1692.925190] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-21f2fde3-b239-47e7-b8bb-6f25e275e184 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1692.925324] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-21f2fde3-b239-47e7-b8bb-6f25e275e184 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Deleting the datastore file [datastore1] 350d5f83-d9ce-4997-bf57-70c4a4e22ba0 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1692.925594] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cd11b963-2bc3-45ad-90fe-63dd07bd2d01 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.932481] env[62510]: DEBUG oslo_vmware.api [None req-21f2fde3-b239-47e7-b8bb-6f25e275e184 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1692.932481] env[62510]: value = "task-1768963" [ 1692.932481] env[62510]: _type = "Task" [ 1692.932481] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.941745] env[62510]: DEBUG oslo_vmware.api [None req-21f2fde3-b239-47e7-b8bb-6f25e275e184 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768963, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.036137] env[62510]: DEBUG oslo_vmware.api [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Task: {'id': task-1768962, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.036465] env[62510]: DEBUG oslo_vmware.api [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768961, 'name': Rename_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.446247] env[62510]: DEBUG oslo_vmware.api [None req-21f2fde3-b239-47e7-b8bb-6f25e275e184 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1768963, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.184005} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1693.446247] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-21f2fde3-b239-47e7-b8bb-6f25e275e184 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1693.446427] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-21f2fde3-b239-47e7-b8bb-6f25e275e184 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1693.447184] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-21f2fde3-b239-47e7-b8bb-6f25e275e184 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1693.447184] env[62510]: INFO nova.compute.manager [None req-21f2fde3-b239-47e7-b8bb-6f25e275e184 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Took 3.54 seconds to destroy the instance on the hypervisor. [ 1693.447184] env[62510]: DEBUG oslo.service.loopingcall [None req-21f2fde3-b239-47e7-b8bb-6f25e275e184 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1693.447332] env[62510]: DEBUG nova.compute.manager [-] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1693.447332] env[62510]: DEBUG nova.network.neutron [-] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1693.530154] env[62510]: DEBUG oslo_vmware.api [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Task: {'id': task-1768962, 'name': ReconfigVM_Task, 'duration_secs': 0.604257} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1693.533393] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Reconfigured VM instance instance-00000046 to attach disk [datastore1] c829d602-97bc-4ec8-9090-c63bed04ac79/c829d602-97bc-4ec8-9090-c63bed04ac79.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1693.534032] env[62510]: DEBUG oslo_vmware.api [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768961, 'name': Rename_Task} progress is 99%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.534275] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-83119a14-f86b-421a-ab6f-0e571bc6c22f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.537944] env[62510]: DEBUG nova.compute.manager [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1693.544017] env[62510]: DEBUG oslo_concurrency.lockutils [None req-27507b66-b661-46a7-ac54-677ca775dc8f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1693.544017] env[62510]: DEBUG oslo_vmware.api [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Waiting for the task: (returnval){ [ 1693.544017] env[62510]: value = "task-1768964" [ 1693.544017] env[62510]: _type = "Task" [ 1693.544017] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1693.544017] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2fca9916-3033-4ff2-986f-312b9b67bcb1 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.686s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1693.544017] env[62510]: DEBUG nova.objects.instance [None req-2fca9916-3033-4ff2-986f-312b9b67bcb1 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Lazy-loading 'resources' on Instance uuid 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1693.555845] env[62510]: DEBUG oslo_vmware.api [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Task: {'id': task-1768964, 'name': Rename_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.572014] env[62510]: DEBUG nova.virt.hardware [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1693.572347] env[62510]: DEBUG nova.virt.hardware [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1693.572529] env[62510]: DEBUG nova.virt.hardware [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1693.572758] env[62510]: DEBUG nova.virt.hardware [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1693.572943] env[62510]: DEBUG nova.virt.hardware [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1693.573150] env[62510]: DEBUG nova.virt.hardware [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1693.573416] env[62510]: DEBUG nova.virt.hardware [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1693.573631] env[62510]: DEBUG nova.virt.hardware [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1693.573825] env[62510]: DEBUG nova.virt.hardware [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1693.574146] env[62510]: DEBUG nova.virt.hardware [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1693.574378] env[62510]: DEBUG nova.virt.hardware [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1693.575842] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee074921-728b-4679-80e9-556a0049c86f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.584989] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2500ba7-81a7-4a83-bf99-c36bc1f2996f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.743319] env[62510]: DEBUG nova.compute.manager [req-abcc9c13-a855-4d88-8143-9860bab2cd6e req-9d19bc47-4217-4671-a8e4-68104fa7fbdc service nova] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Received event network-vif-plugged-db18d253-3621-4972-a6c9-1f82a650ccb0 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1693.743319] env[62510]: DEBUG oslo_concurrency.lockutils [req-abcc9c13-a855-4d88-8143-9860bab2cd6e req-9d19bc47-4217-4671-a8e4-68104fa7fbdc service nova] Acquiring lock "c8e69231-2786-47ac-9a44-c194088b8079-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1693.743319] env[62510]: DEBUG oslo_concurrency.lockutils [req-abcc9c13-a855-4d88-8143-9860bab2cd6e req-9d19bc47-4217-4671-a8e4-68104fa7fbdc service nova] Lock "c8e69231-2786-47ac-9a44-c194088b8079-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1693.743557] env[62510]: DEBUG oslo_concurrency.lockutils [req-abcc9c13-a855-4d88-8143-9860bab2cd6e req-9d19bc47-4217-4671-a8e4-68104fa7fbdc service nova] Lock "c8e69231-2786-47ac-9a44-c194088b8079-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1693.743704] env[62510]: DEBUG nova.compute.manager [req-abcc9c13-a855-4d88-8143-9860bab2cd6e req-9d19bc47-4217-4671-a8e4-68104fa7fbdc service nova] [instance: c8e69231-2786-47ac-9a44-c194088b8079] No waiting events found dispatching network-vif-plugged-db18d253-3621-4972-a6c9-1f82a650ccb0 
{{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1693.743889] env[62510]: WARNING nova.compute.manager [req-abcc9c13-a855-4d88-8143-9860bab2cd6e req-9d19bc47-4217-4671-a8e4-68104fa7fbdc service nova] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Received unexpected event network-vif-plugged-db18d253-3621-4972-a6c9-1f82a650ccb0 for instance with vm_state building and task_state spawning. [ 1693.826546] env[62510]: DEBUG nova.network.neutron [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Successfully updated port: db18d253-3621-4972-a6c9-1f82a650ccb0 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1693.884930] env[62510]: DEBUG nova.compute.manager [req-813a459e-180d-4626-9d3c-4637e51255b1 req-4117c89d-0b23-46e7-8c5c-b0d9dd42f5b5 service nova] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Received event network-vif-deleted-8597503b-d757-44ff-91a7-6f52b3b75aa3 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1693.885569] env[62510]: INFO nova.compute.manager [req-813a459e-180d-4626-9d3c-4637e51255b1 req-4117c89d-0b23-46e7-8c5c-b0d9dd42f5b5 service nova] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Neutron deleted interface 8597503b-d757-44ff-91a7-6f52b3b75aa3; detaching it from the instance and deleting it from the info cache [ 1693.885778] env[62510]: DEBUG nova.network.neutron [req-813a459e-180d-4626-9d3c-4637e51255b1 req-4117c89d-0b23-46e7-8c5c-b0d9dd42f5b5 service nova] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1694.027847] env[62510]: DEBUG oslo_vmware.api [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768961, 'name': Rename_Task, 'duration_secs': 1.157311} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.028134] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1694.028398] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-95ef1f37-26a1-4fa5-8b7e-fb6cef05eb48 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.036879] env[62510]: DEBUG oslo_vmware.api [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1694.036879] env[62510]: value = "task-1768965" [ 1694.036879] env[62510]: _type = "Task" [ 1694.036879] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.045526] env[62510]: DEBUG oslo_vmware.api [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768965, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.057974] env[62510]: DEBUG oslo_vmware.api [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Task: {'id': task-1768964, 'name': Rename_Task, 'duration_secs': 0.142364} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.057974] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1694.058455] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2342a2b0-9630-4c85-8f10-e8981c3fb8b0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.064892] env[62510]: DEBUG oslo_vmware.api [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Waiting for the task: (returnval){ [ 1694.064892] env[62510]: value = "task-1768966" [ 1694.064892] env[62510]: _type = "Task" [ 1694.064892] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.079146] env[62510]: DEBUG oslo_vmware.api [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Task: {'id': task-1768966, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.331368] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Acquiring lock "refresh_cache-c8e69231-2786-47ac-9a44-c194088b8079" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1694.331518] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Acquired lock "refresh_cache-c8e69231-2786-47ac-9a44-c194088b8079" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1694.331675] env[62510]: DEBUG nova.network.neutron [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1694.358994] env[62510]: DEBUG nova.network.neutron [-] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1694.388560] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a963445a-f384-42f9-9910-a846bc209de3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.403684] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a880b08-7f9b-4dbd-98cf-06c96942abfc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.452680] env[62510]: DEBUG nova.compute.manager [req-813a459e-180d-4626-9d3c-4637e51255b1 req-4117c89d-0b23-46e7-8c5c-b0d9dd42f5b5 service nova] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Detach interface failed, port_id=8597503b-d757-44ff-91a7-6f52b3b75aa3, reason: Instance 350d5f83-d9ce-4997-bf57-70c4a4e22ba0 could not be found. 
{{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1694.481504] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f31981f5-4f97-4734-94b2-52369be20730 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.490306] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76073ad1-8e03-4459-98b5-a57225ad546c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.524848] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2978a28f-5739-4edd-b1c2-59b15f603b23 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.533465] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8075f652-d27b-4b05-9b04-c4a1854fc138 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.548226] env[62510]: DEBUG oslo_vmware.api [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768965, 'name': PowerOnVM_Task, 'duration_secs': 0.455319} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.556827] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1694.557108] env[62510]: INFO nova.compute.manager [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Took 10.55 seconds to spawn the instance on the hypervisor. [ 1694.557361] env[62510]: DEBUG nova.compute.manager [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1694.557891] env[62510]: DEBUG nova.compute.provider_tree [None req-2fca9916-3033-4ff2-986f-312b9b67bcb1 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1694.559922] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-857378c3-c9db-4748-8a30-e9fb088e7d88 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.579248] env[62510]: DEBUG oslo_vmware.api [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Task: {'id': task-1768966, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.861301] env[62510]: INFO nova.compute.manager [-] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Took 1.41 seconds to deallocate network for instance. [ 1694.881276] env[62510]: DEBUG nova.network.neutron [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1695.065307] env[62510]: DEBUG nova.scheduler.client.report [None req-2fca9916-3033-4ff2-986f-312b9b67bcb1 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1695.083425] env[62510]: DEBUG oslo_vmware.api [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Task: {'id': task-1768966, 'name': PowerOnVM_Task, 'duration_secs': 0.791309} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1695.085827] env[62510]: INFO nova.compute.manager [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Took 54.76 seconds to build instance. [ 1695.086836] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1695.086971] env[62510]: INFO nova.compute.manager [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Took 8.60 seconds to spawn the instance on the hypervisor. 
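Every *_Task exchange in the records above (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task, DeleteDatastoreFile_Task) follows the same oslo.vmware flow: invoke the asynchronous vSphere method, receive a task reference back, then poll that reference until it reaches a terminal state, which is what produces the repeated "Waiting for the task" / "progress is N%" / "completed successfully" entries. The sketch below shows that flow through oslo.vmware's public session API under illustrative assumptions: the vCenter endpoint, credentials and the 'vm-12345' managed-object reference are placeholders, not values taken from this log, and nova's vmwareapi driver reaches the same calls through its own internal wrappers rather than exactly this code.

    # Minimal sketch of the oslo.vmware invoke-and-wait pattern seen in the
    # log above. Endpoint, credentials and the moref value are placeholders.
    from oslo_vmware import api
    from oslo_vmware import vim_util

    # Open an authenticated vSphere API session; retry count and poll
    # interval stand in for whatever the deployment configures.
    session = api.VMwareAPISession(
        'vcenter.example.org', 'administrator', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Build a managed-object reference for an existing VM and start an
    # asynchronous vSphere task against it. The call returns immediately
    # with a task reference (the value = "task-XXXX", _type = "Task"
    # payload logged while waiting).
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # Block until the task completes; oslo.vmware polls it in a loop,
    # which is what emits the repeated "progress is N%" records followed
    # by "completed successfully".
    session.wait_for_task(task)
    session.logout()

A short poll interval keeps task-completion latency visible at DEBUG level, which is why nearly every record in this section is either an "Invoking ..." request or one of these progress polls.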
[ 1695.087176] env[62510]: DEBUG nova.compute.manager [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1695.089055] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6104ef40-177d-47b5-9793-e9aab274654b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.112792] env[62510]: DEBUG nova.network.neutron [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Updating instance_info_cache with network_info: [{"id": "db18d253-3621-4972-a6c9-1f82a650ccb0", "address": "fa:16:3e:80:65:6a", "network": {"id": "28802dea-043a-4d1b-b938-f1075bb2596a", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-760212540-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6cfd45be082f40eb9ccfd136feeabe77", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5bd281ed-ae39-485f-90ee-4ee27994b5b0", "external-id": "nsx-vlan-transportzone-305", "segmentation_id": 305, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb18d253-36", "ovs_interfaceid": "db18d253-3621-4972-a6c9-1f82a650ccb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1695.368306] env[62510]: DEBUG oslo_concurrency.lockutils [None req-21f2fde3-b239-47e7-b8bb-6f25e275e184 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1695.575138] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2fca9916-3033-4ff2-986f-312b9b67bcb1 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.031s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1695.577553] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.319s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1695.579044] env[62510]: INFO nova.compute.claims [None 
req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1695.592747] env[62510]: DEBUG oslo_concurrency.lockutils [None req-369203ca-adb3-482b-9744-b9e19b30a5c7 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "16b5d928-94fe-4fd5-9909-775c28d7edd2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.275s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1695.597470] env[62510]: INFO nova.scheduler.client.report [None req-2fca9916-3033-4ff2-986f-312b9b67bcb1 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Deleted allocations for instance 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7 [ 1695.608721] env[62510]: INFO nova.compute.manager [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Took 52.38 seconds to build instance. [ 1695.615829] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Releasing lock "refresh_cache-c8e69231-2786-47ac-9a44-c194088b8079" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1695.616626] env[62510]: DEBUG nova.compute.manager [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Instance network_info: |[{"id": "db18d253-3621-4972-a6c9-1f82a650ccb0", "address": "fa:16:3e:80:65:6a", "network": {"id": "28802dea-043a-4d1b-b938-f1075bb2596a", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-760212540-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6cfd45be082f40eb9ccfd136feeabe77", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5bd281ed-ae39-485f-90ee-4ee27994b5b0", "external-id": "nsx-vlan-transportzone-305", "segmentation_id": 305, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb18d253-36", "ovs_interfaceid": "db18d253-3621-4972-a6c9-1f82a650ccb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1695.616626] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Instance VIF info [{'network_name': 'br-int', 
'mac_address': 'fa:16:3e:80:65:6a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5bd281ed-ae39-485f-90ee-4ee27994b5b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'db18d253-3621-4972-a6c9-1f82a650ccb0', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1695.624400] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Creating folder: Project (6cfd45be082f40eb9ccfd136feeabe77). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1695.625302] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-931944c3-51f7-435e-80ba-85044b9f7989 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.641238] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Created folder: Project (6cfd45be082f40eb9ccfd136feeabe77) in parent group-v367197. [ 1695.641360] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Creating folder: Instances. Parent ref: group-v367379. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1695.641601] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2a772d31-9803-4461-8216-d4e9fd5c2e9a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.654139] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Created folder: Instances in parent group-v367379. [ 1695.654404] env[62510]: DEBUG oslo.service.loopingcall [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1695.654837] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1695.655076] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-edb2c5d5-5908-4ace-b364-bb2f1de90769 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.676036] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1695.676036] env[62510]: value = "task-1768969" [ 1695.676036] env[62510]: _type = "Task" [ 1695.676036] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1695.684831] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768969, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.070700] env[62510]: DEBUG nova.compute.manager [req-0f800b55-37d8-4018-bcab-77b41667079f req-a74c4ae7-3e26-42a2-ad7b-70f66a38b58b service nova] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Received event network-changed-db18d253-3621-4972-a6c9-1f82a650ccb0 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1696.070902] env[62510]: DEBUG nova.compute.manager [req-0f800b55-37d8-4018-bcab-77b41667079f req-a74c4ae7-3e26-42a2-ad7b-70f66a38b58b service nova] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Refreshing instance network info cache due to event network-changed-db18d253-3621-4972-a6c9-1f82a650ccb0. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1696.071080] env[62510]: DEBUG oslo_concurrency.lockutils [req-0f800b55-37d8-4018-bcab-77b41667079f req-a74c4ae7-3e26-42a2-ad7b-70f66a38b58b service nova] Acquiring lock "refresh_cache-c8e69231-2786-47ac-9a44-c194088b8079" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1696.071229] env[62510]: DEBUG oslo_concurrency.lockutils [req-0f800b55-37d8-4018-bcab-77b41667079f req-a74c4ae7-3e26-42a2-ad7b-70f66a38b58b service nova] Acquired lock "refresh_cache-c8e69231-2786-47ac-9a44-c194088b8079" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1696.071395] env[62510]: DEBUG nova.network.neutron [req-0f800b55-37d8-4018-bcab-77b41667079f req-a74c4ae7-3e26-42a2-ad7b-70f66a38b58b service nova] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Refreshing network info cache for port db18d253-3621-4972-a6c9-1f82a650ccb0 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1696.104345] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2fca9916-3033-4ff2-986f-312b9b67bcb1 tempest-ServerShowV254Test-917743341 tempest-ServerShowV254Test-917743341-project-member] Lock "4d622ed5-5f6f-46ca-bc4a-efb32f452cb7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.590s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1696.110128] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bbfce42c-489a-42e1-8492-d4befac2ca74 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Lock "c829d602-97bc-4ec8-9090-c63bed04ac79" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.897s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1696.185749] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768969, 'name': CreateVM_Task, 'duration_secs': 0.33282} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.185925] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1696.186664] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1696.186828] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1696.187168] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1696.187430] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b8ca77b-9ea9-46e8-921f-ac1dde11e60b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.192717] env[62510]: DEBUG oslo_vmware.api [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Waiting for the task: (returnval){ [ 1696.192717] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52708b3d-e2da-824d-1bd3-471df19a6316" [ 1696.192717] env[62510]: _type = "Task" [ 1696.192717] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1696.201129] env[62510]: DEBUG oslo_vmware.api [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52708b3d-e2da-824d-1bd3-471df19a6316, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.363038] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Acquiring lock "e7daad63-c802-4a86-bead-7e849064ed61" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1696.363316] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Lock "e7daad63-c802-4a86-bead-7e849064ed61" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1696.363505] env[62510]: INFO nova.compute.manager [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Shelving [ 1696.638227] env[62510]: DEBUG nova.compute.manager [None req-a9fa7169-0d2e-47de-8739-7dc627eb07d1 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1696.639199] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca76ac02-a3c1-401b-ad9d-94eada4436e5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.661840] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24fee006-af04-4d12-b271-1c2c97912e87 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Acquiring lock "c829d602-97bc-4ec8-9090-c63bed04ac79" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1696.662152] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24fee006-af04-4d12-b271-1c2c97912e87 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Lock "c829d602-97bc-4ec8-9090-c63bed04ac79" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1696.662324] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24fee006-af04-4d12-b271-1c2c97912e87 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Acquiring lock "c829d602-97bc-4ec8-9090-c63bed04ac79-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1696.662507] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24fee006-af04-4d12-b271-1c2c97912e87 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Lock "c829d602-97bc-4ec8-9090-c63bed04ac79-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1696.663369] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24fee006-af04-4d12-b271-1c2c97912e87 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Lock "c829d602-97bc-4ec8-9090-c63bed04ac79-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1696.665240] env[62510]: INFO nova.compute.manager [None req-24fee006-af04-4d12-b271-1c2c97912e87 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Terminating instance [ 1696.706879] env[62510]: DEBUG oslo_vmware.api [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52708b3d-e2da-824d-1bd3-471df19a6316, 'name': SearchDatastore_Task, 'duration_secs': 0.010516} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.706979] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1696.708030] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1696.708030] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1696.708030] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1696.708030] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1696.708549] 
env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b3087eee-34c6-43a5-bf71-9c6e4a0a9464 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.719912] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1696.720113] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1696.723033] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-972351a2-4ccf-431b-9261-c182effc4058 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.729893] env[62510]: DEBUG oslo_vmware.api [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Waiting for the task: (returnval){ [ 1696.729893] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52e88a3f-aecd-538f-5314-79802810a773" [ 1696.729893] env[62510]: _type = "Task" [ 1696.729893] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1696.742767] env[62510]: DEBUG oslo_vmware.api [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52e88a3f-aecd-538f-5314-79802810a773, 'name': SearchDatastore_Task, 'duration_secs': 0.010892} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.743559] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d45c171d-5890-45c5-92cb-49fdb78c59ed {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.751532] env[62510]: DEBUG oslo_vmware.api [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Waiting for the task: (returnval){ [ 1696.751532] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52599446-f6a5-9934-5240-0d08d56c1456" [ 1696.751532] env[62510]: _type = "Task" [ 1696.751532] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1696.759936] env[62510]: DEBUG oslo_vmware.api [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52599446-f6a5-9934-5240-0d08d56c1456, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.820588] env[62510]: DEBUG nova.network.neutron [req-0f800b55-37d8-4018-bcab-77b41667079f req-a74c4ae7-3e26-42a2-ad7b-70f66a38b58b service nova] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Updated VIF entry in instance network info cache for port db18d253-3621-4972-a6c9-1f82a650ccb0. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1696.820874] env[62510]: DEBUG nova.network.neutron [req-0f800b55-37d8-4018-bcab-77b41667079f req-a74c4ae7-3e26-42a2-ad7b-70f66a38b58b service nova] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Updating instance_info_cache with network_info: [{"id": "db18d253-3621-4972-a6c9-1f82a650ccb0", "address": "fa:16:3e:80:65:6a", "network": {"id": "28802dea-043a-4d1b-b938-f1075bb2596a", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-760212540-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6cfd45be082f40eb9ccfd136feeabe77", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5bd281ed-ae39-485f-90ee-4ee27994b5b0", "external-id": "nsx-vlan-transportzone-305", "segmentation_id": 305, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb18d253-36", "ovs_interfaceid": "db18d253-3621-4972-a6c9-1f82a650ccb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1696.997565] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ca31bd4-644a-4909-bf79-5a7fb4f1a5c7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.006319] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74ac0f5f-d45f-4e07-9d1a-af5738b8647c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.038666] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08f6f094-da01-42eb-9692-35508459de86 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.047625] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f275a45f-06eb-4773-9f02-686f6d342473 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.062618] env[62510]: DEBUG nova.compute.provider_tree [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1697.151781] env[62510]: INFO nova.compute.manager [None 
req-a9fa7169-0d2e-47de-8739-7dc627eb07d1 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] instance snapshotting [ 1697.155101] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c91e7a0-ae96-416d-9248-3c87fd6e41f2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.176595] env[62510]: DEBUG nova.compute.manager [None req-24fee006-af04-4d12-b271-1c2c97912e87 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1697.176917] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-24fee006-af04-4d12-b271-1c2c97912e87 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1697.177718] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba7a6287-5216-4e53-af81-3aeef773a228 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.180797] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab757729-dab3-4829-9c7b-3d7defc7a5f2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.193085] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-24fee006-af04-4d12-b271-1c2c97912e87 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1697.193353] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e2d2168b-5d31-4cb5-a4aa-b808e2aa52d1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.204036] env[62510]: DEBUG oslo_vmware.api [None req-24fee006-af04-4d12-b271-1c2c97912e87 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Waiting for the task: (returnval){ [ 1697.204036] env[62510]: value = "task-1768970" [ 1697.204036] env[62510]: _type = "Task" [ 1697.204036] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1697.213602] env[62510]: DEBUG oslo_vmware.api [None req-24fee006-af04-4d12-b271-1c2c97912e87 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Task: {'id': task-1768970, 'name': PowerOffVM_Task} progress is 0%. 
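The VIF entry refreshed into the instance_info_cache a little earlier (port db18d253-3621-4972-a6c9-1f82a650ccb0 on the 192.168.128.0/28 subnet) packs the fixed IP, gateway, DHCP server and MTU into nested dictionaries. A minimal stand-alone check of those values using only Python's standard ipaddress module; the trimmed dictionary below is a hand-copied excerpt of the logged data, not Nova's NetworkInfo model:

    import ipaddress

    # Trimmed excerpt of the cache entry logged for port db18d253-3621-4972-a6c9-1f82a650ccb0.
    vif = {
        "address": "fa:16:3e:80:65:6a",
        "mtu": 8950,
        "subnet": {
            "cidr": "192.168.128.0/28",
            "gateway": "192.168.128.1",
            "dhcp_server": "192.168.128.2",
            "fixed_ip": "192.168.128.14",
        },
    }

    net = ipaddress.ip_network(vif["subnet"]["cidr"])
    for name in ("gateway", "dhcp_server", "fixed_ip"):
        addr = ipaddress.ip_address(vif["subnet"][name])
        # Every address recorded in the cache entry should fall inside the /28.
        print(f"{name} {addr} in {net}: {addr in net}")

A /28 leaves 14 usable host addresses, which lines up with the .1 gateway, .2 DHCP server and .14 fixed address recorded above.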
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.263237] env[62510]: DEBUG oslo_vmware.api [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52599446-f6a5-9934-5240-0d08d56c1456, 'name': SearchDatastore_Task, 'duration_secs': 0.010603} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1697.263513] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1697.263769] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] c8e69231-2786-47ac-9a44-c194088b8079/c8e69231-2786-47ac-9a44-c194088b8079.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1697.264057] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0f06aed2-9a4d-49c0-a453-1f106bc62aed {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.277045] env[62510]: DEBUG oslo_vmware.api [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Waiting for the task: (returnval){ [ 1697.277045] env[62510]: value = "task-1768971" [ 1697.277045] env[62510]: _type = "Task" [ 1697.277045] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1697.288409] env[62510]: DEBUG oslo_vmware.api [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Task: {'id': task-1768971, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.323484] env[62510]: DEBUG oslo_concurrency.lockutils [req-0f800b55-37d8-4018-bcab-77b41667079f req-a74c4ae7-3e26-42a2-ad7b-70f66a38b58b service nova] Releasing lock "refresh_cache-c8e69231-2786-47ac-9a44-c194088b8079" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1697.376625] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1697.376857] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9df44dba-bc54-4aee-8686-aca01393e4dc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.386870] env[62510]: DEBUG oslo_vmware.api [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for the task: (returnval){ [ 1697.386870] env[62510]: value = "task-1768972" [ 1697.386870] env[62510]: _type = "Task" [ 1697.386870] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1697.399354] env[62510]: DEBUG oslo_vmware.api [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768972, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.566529] env[62510]: DEBUG nova.scheduler.client.report [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1697.693675] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a9fa7169-0d2e-47de-8739-7dc627eb07d1 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Creating Snapshot of the VM instance {{(pid=62510) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1697.694034] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-a605eb2a-1255-4f25-ad53-ed894d339df0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.703686] env[62510]: DEBUG oslo_vmware.api [None req-a9fa7169-0d2e-47de-8739-7dc627eb07d1 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1697.703686] env[62510]: value = "task-1768973" [ 1697.703686] env[62510]: _type = "Task" [ 1697.703686] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1697.716837] env[62510]: DEBUG oslo_vmware.api [None req-a9fa7169-0d2e-47de-8739-7dc627eb07d1 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768973, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.719726] env[62510]: DEBUG oslo_vmware.api [None req-24fee006-af04-4d12-b271-1c2c97912e87 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Task: {'id': task-1768970, 'name': PowerOffVM_Task, 'duration_secs': 0.21482} completed successfully. 
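The inventory logged for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 lists each resource class as a total, a reserved amount and an allocation_ratio (plus unit bounds). Placement schedules against (total - reserved) * allocation_ratio, so a quick check of the logged figures (plain Python, no Placement client involved) shows what this host can actually accept:

    # Inventory exactly as logged for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        # Placement allocates against (total - reserved) * allocation_ratio.
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: schedulable capacity {capacity}")

That works out to 192 VCPU (48 host threads overcommitted 4:1), 196078 MB of RAM and 400 GB of disk with no overcommit.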
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1697.720011] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-24fee006-af04-4d12-b271-1c2c97912e87 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1697.720207] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-24fee006-af04-4d12-b271-1c2c97912e87 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1697.720480] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f4e038a9-10f8-48d4-aef8-ca2c213a5056 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.788905] env[62510]: DEBUG oslo_vmware.api [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Task: {'id': task-1768971, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.818899] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-24fee006-af04-4d12-b271-1c2c97912e87 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1697.819256] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-24fee006-af04-4d12-b271-1c2c97912e87 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1697.819491] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-24fee006-af04-4d12-b271-1c2c97912e87 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Deleting the datastore file [datastore1] c829d602-97bc-4ec8-9090-c63bed04ac79 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1697.819812] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3873bc51-e172-4fc2-a6a0-b65850048347 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.827819] env[62510]: DEBUG oslo_vmware.api [None req-24fee006-af04-4d12-b271-1c2c97912e87 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Waiting for the task: (returnval){ [ 1697.827819] env[62510]: value = "task-1768975" [ 1697.827819] env[62510]: _type = "Task" [ 1697.827819] env[62510]: } to complete. 
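The Task blocks repeated through this stretch ("Waiting for the task: ... to complete", a few "progress is N%" polls, then "completed successfully" with a duration_secs) are oslo.vmware polling vCenter tasks until they reach a terminal state. A self-contained sketch of that polling loop, with a fake fetch_task_info() standing in for a real vSphere session; it only mirrors the pattern and the log wording, it is not oslo.vmware's implementation:

    import itertools
    import time

    def fetch_task_info(task_id, _progress=itertools.count(0, 50)):
        """Stand-in for a vSphere TaskInfo lookup; advances 0% -> 50% -> done."""
        pct = min(next(_progress), 100)
        return {"id": task_id,
                "state": "success" if pct >= 100 else "running",
                "progress": pct}

    def wait_for_task(task_id, interval=0.5):
        """Poll until the task reaches a terminal state, logging progress as above."""
        start = time.monotonic()
        while True:
            info = fetch_task_info(task_id)
            if info["state"] == "success":
                print(f"Task {task_id} completed successfully in "
                      f"{time.monotonic() - start:.3f}s")
                return info
            if info["state"] == "error":
                raise RuntimeError(f"Task {task_id} failed")
            print(f"Task {task_id} progress is {info['progress']}%")
            time.sleep(interval)

    wait_for_task("task-1768970")

The real wait_for_task in oslo.vmware runs a similar fixed-interval poll, which is why every long-running vCenter operation in this log shows up as a burst of progress lines followed by a single duration.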
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1697.836502] env[62510]: DEBUG oslo_vmware.api [None req-24fee006-af04-4d12-b271-1c2c97912e87 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Task: {'id': task-1768975, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.897537] env[62510]: DEBUG oslo_vmware.api [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768972, 'name': PowerOffVM_Task, 'duration_secs': 0.183541} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1697.897898] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1697.898758] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a293dd6-c247-45ca-a367-9fad9e6320a6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.918701] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b9e6583-b826-4036-8a69-775b1afd84ba {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.071911] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.494s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1698.072897] env[62510]: DEBUG oslo_concurrency.lockutils [None req-74f9e27a-caad-46f9-b6d0-6aa789f4e49f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.868s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1698.073153] env[62510]: DEBUG nova.objects.instance [None req-74f9e27a-caad-46f9-b6d0-6aa789f4e49f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lazy-loading 'resources' on Instance uuid 0d27da5c-20f3-4df1-86d2-036c904fd657 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1698.213599] env[62510]: DEBUG oslo_vmware.api [None req-a9fa7169-0d2e-47de-8739-7dc627eb07d1 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768973, 'name': CreateSnapshot_Task} progress is 0%. 
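The lockutils lines around these entries record three figures per named lock: how long the caller waited to acquire it, that it was acquired, and how long it was held at release (here "compute_resources" was held 2.494s by instance_claim and the next claimant had waited 38.868s). A rough stand-alone approximation of that accounting built on threading.Lock rather than oslo.concurrency, just to show where the waited/held numbers come from:

    import contextlib
    import threading
    import time

    _locks = {}

    @contextlib.contextmanager
    def timed_lock(name):
        """Acquire a named lock and report waited/held times like the lines above."""
        lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        lock.acquire()
        acquired = time.monotonic()
        print(f'Lock "{name}" acquired :: waited {acquired - t0:.3f}s')
        try:
            yield
        finally:
            lock.release()
            print(f'Lock "{name}" released :: held {time.monotonic() - acquired:.3f}s')

    with timed_lock("compute_resources"):
        time.sleep(0.1)  # stand-in for ResourceTracker work done under the lock

A large "waited" value such as the 38.868s above simply means another request held the same named lock for that long first; it is contention, not a hang.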
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.288817] env[62510]: DEBUG oslo_vmware.api [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Task: {'id': task-1768971, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.558041} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1698.289558] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] c8e69231-2786-47ac-9a44-c194088b8079/c8e69231-2786-47ac-9a44-c194088b8079.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1698.289558] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1698.289724] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-92a2b1cc-1d52-4b47-bf86-a73a924230b1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.297215] env[62510]: DEBUG oslo_vmware.api [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Waiting for the task: (returnval){ [ 1698.297215] env[62510]: value = "task-1768976" [ 1698.297215] env[62510]: _type = "Task" [ 1698.297215] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1698.306491] env[62510]: DEBUG oslo_vmware.api [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Task: {'id': task-1768976, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.339732] env[62510]: DEBUG oslo_vmware.api [None req-24fee006-af04-4d12-b271-1c2c97912e87 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Task: {'id': task-1768975, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.1659} completed successfully. 
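The "Extending root virtual disk to 1048576" step that follows the image-cache copy is the m1.nano flavor's 1 GB root disk expressed in KiB, which appears to be the unit the VMDK extend call takes here. A one-line check of the arithmetic:

    root_gb = 1                       # root_gb of the m1.nano flavor used by these tests
    size_kb = root_gb * 1024 * 1024   # GiB -> KiB
    assert size_kb == 1048576         # matches the value logged by _extend_virtual_disk
    print(size_kb)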
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1698.339732] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-24fee006-af04-4d12-b271-1c2c97912e87 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1698.339732] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-24fee006-af04-4d12-b271-1c2c97912e87 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1698.339732] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-24fee006-af04-4d12-b271-1c2c97912e87 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1698.339960] env[62510]: INFO nova.compute.manager [None req-24fee006-af04-4d12-b271-1c2c97912e87 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1698.340055] env[62510]: DEBUG oslo.service.loopingcall [None req-24fee006-af04-4d12-b271-1c2c97912e87 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1698.340346] env[62510]: DEBUG nova.compute.manager [-] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1698.340472] env[62510]: DEBUG nova.network.neutron [-] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1698.430019] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Creating Snapshot of the VM instance {{(pid=62510) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1698.430349] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-05419c47-6620-4b9c-9cdf-b2b4426547ac {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.439492] env[62510]: DEBUG oslo_vmware.api [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for the task: (returnval){ [ 1698.439492] env[62510]: value = "task-1768977" [ 1698.439492] env[62510]: _type = "Task" [ 1698.439492] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1698.449228] env[62510]: DEBUG oslo_vmware.api [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768977, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.576434] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Acquiring lock "3490a629-0f73-4ba9-b48b-a25787219beb" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1698.576746] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Lock "3490a629-0f73-4ba9-b48b-a25787219beb" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1698.720058] env[62510]: DEBUG oslo_vmware.api [None req-a9fa7169-0d2e-47de-8739-7dc627eb07d1 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768973, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.811485] env[62510]: DEBUG oslo_vmware.api [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Task: {'id': task-1768976, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.286107} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1698.811757] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1698.812560] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60d46a34-b917-4ea3-a7bd-f2a06f403a3c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.835385] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] c8e69231-2786-47ac-9a44-c194088b8079/c8e69231-2786-47ac-9a44-c194088b8079.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1698.838223] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-157a6e54-ea12-4910-b190-4adfe818cafa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.855946] env[62510]: DEBUG nova.compute.manager [req-90d1ec39-1e4e-4c25-9f3f-3602ca7f4434 req-2d8d481b-3333-4341-89b4-12c30a434af7 service nova] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Received event network-vif-deleted-2c2156f3-896a-4dbd-9693-22baadf98a7e {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1698.856211] env[62510]: INFO nova.compute.manager [req-90d1ec39-1e4e-4c25-9f3f-3602ca7f4434 req-2d8d481b-3333-4341-89b4-12c30a434af7 service nova] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Neutron deleted interface 2c2156f3-896a-4dbd-9693-22baadf98a7e; detaching it from the instance and deleting it from the info cache [ 1698.856545] env[62510]: DEBUG nova.network.neutron [req-90d1ec39-1e4e-4c25-9f3f-3602ca7f4434 req-2d8d481b-3333-4341-89b4-12c30a434af7 service nova] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1698.864375] env[62510]: DEBUG oslo_vmware.api [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Waiting for the task: (returnval){ [ 1698.864375] env[62510]: value = "task-1768978" [ 1698.864375] env[62510]: _type = "Task" [ 1698.864375] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1698.879203] env[62510]: DEBUG oslo_vmware.api [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Task: {'id': task-1768978, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.951511] env[62510]: DEBUG oslo_vmware.api [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768977, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.017698] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b371f5f3-fa8a-4e98-8a44-6829b36f146b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.025997] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a006fb33-2968-4b6a-acd5-e33743c23803 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.058676] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1261601-2367-4189-8c97-e17a7dd3f599 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.067205] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3fd8317-119b-43b8-a527-9b60926f7679 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.081503] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Lock "3490a629-0f73-4ba9-b48b-a25787219beb" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.505s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1699.082143] env[62510]: DEBUG nova.compute.manager [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1699.084824] env[62510]: DEBUG nova.compute.provider_tree [None req-74f9e27a-caad-46f9-b6d0-6aa789f4e49f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1699.218104] env[62510]: DEBUG oslo_vmware.api [None req-a9fa7169-0d2e-47de-8739-7dc627eb07d1 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768973, 'name': CreateSnapshot_Task, 'duration_secs': 1.49503} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1699.218104] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a9fa7169-0d2e-47de-8739-7dc627eb07d1 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Created Snapshot of the VM instance {{(pid=62510) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1699.218992] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a22eb1c-e617-4bd4-b1b8-83e500313bc9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.233545] env[62510]: DEBUG nova.network.neutron [-] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1699.360279] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4a106ac3-44ab-4d96-a73d-fa0f464a82b6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.375658] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d620896-ad53-4291-9404-2edc3a77cc53 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.395515] env[62510]: DEBUG oslo_vmware.api [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Task: {'id': task-1768978, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.417383] env[62510]: DEBUG nova.compute.manager [req-90d1ec39-1e4e-4c25-9f3f-3602ca7f4434 req-2d8d481b-3333-4341-89b4-12c30a434af7 service nova] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Detach interface failed, port_id=2c2156f3-896a-4dbd-9693-22baadf98a7e, reason: Instance c829d602-97bc-4ec8-9090-c63bed04ac79 could not be found. {{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1699.451010] env[62510]: DEBUG oslo_vmware.api [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768977, 'name': CreateSnapshot_Task, 'duration_secs': 0.941898} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1699.451300] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Created Snapshot of the VM instance {{(pid=62510) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1699.452088] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdff08a2-b94b-4fca-ada0-650f76ce8ab8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.588266] env[62510]: DEBUG nova.compute.utils [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1699.590094] env[62510]: DEBUG nova.scheduler.client.report [None req-74f9e27a-caad-46f9-b6d0-6aa789f4e49f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1699.593305] env[62510]: DEBUG nova.compute.manager [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1699.593486] env[62510]: DEBUG nova.network.neutron [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1699.659343] env[62510]: DEBUG nova.policy [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f7f8be4e4b5d4e5eba23ca22c0c64911', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a7b7083f97b844289b07cf4af3bf3765', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1699.743529] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a9fa7169-0d2e-47de-8739-7dc627eb07d1 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Creating linked-clone VM from snapshot {{(pid=62510) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1699.743529] env[62510]: INFO nova.compute.manager [-] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Took 1.40 seconds to deallocate network for instance. [ 1699.743529] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-2e63db01-d68a-472a-9c27-943c58e83835 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.755259] env[62510]: DEBUG oslo_vmware.api [None req-a9fa7169-0d2e-47de-8739-7dc627eb07d1 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1699.755259] env[62510]: value = "task-1768979" [ 1699.755259] env[62510]: _type = "Task" [ 1699.755259] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1699.765826] env[62510]: DEBUG oslo_vmware.api [None req-a9fa7169-0d2e-47de-8739-7dc627eb07d1 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768979, 'name': CloneVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.878458] env[62510]: DEBUG oslo_vmware.api [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Task: {'id': task-1768978, 'name': ReconfigVM_Task, 'duration_secs': 0.579329} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1699.878667] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Reconfigured VM instance instance-00000047 to attach disk [datastore1] c8e69231-2786-47ac-9a44-c194088b8079/c8e69231-2786-47ac-9a44-c194088b8079.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1699.879328] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-338dcd24-c3bf-412d-9f47-4cd458d4ebcb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.887448] env[62510]: DEBUG oslo_vmware.api [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Waiting for the task: (returnval){ [ 1699.887448] env[62510]: value = "task-1768980" [ 1699.887448] env[62510]: _type = "Task" [ 1699.887448] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1699.898714] env[62510]: DEBUG oslo_vmware.api [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Task: {'id': task-1768980, 'name': Rename_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.970855] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Creating linked-clone VM from snapshot {{(pid=62510) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1699.971093] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-03d86bc2-52a3-4da0-a9a5-043eb2855b4e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.975705] env[62510]: DEBUG nova.network.neutron [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Successfully created port: c065c584-69ab-4d52-8de2-906db2d8c438 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1699.985212] env[62510]: DEBUG oslo_vmware.api [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for the task: (returnval){ [ 1699.985212] env[62510]: value = "task-1768981" [ 1699.985212] env[62510]: _type = "Task" [ 1699.985212] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1699.997186] env[62510]: DEBUG oslo_vmware.api [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768981, 'name': CloneVM_Task} progress is 0%. 
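Both the image snapshot of 16b5d928-94fe-4fd5-9909-775c28d7edd2 and the shelve of e7daad63-c802-4a86-bead-7e849064ed61 follow the same driver sequence in these entries: an optional PowerOffVM_Task (shelve only), a CreateSnapshot_Task, then a CloneVM_Task that builds a linked clone from the snapshot, which is later exported to the image service (the upload itself is not yet visible in this stretch of the log). A compressed sketch of that ordering with hypothetical stub functions; the real calls live in nova.virt.vmwareapi.vmops and vm_util:

    # Hypothetical stubs standing in for the vmwareapi driver calls seen in the log.
    def power_off(vm): print(f"PowerOffVM_Task({vm})")
    def create_snapshot(vm): print(f"CreateSnapshot_Task({vm})"); return f"{vm}-snap"
    def linked_clone(vm, snap): print(f"CloneVM_Task({vm} from {snap})"); return f"{vm}-clone"
    def upload_image(clone): print(f"upload {clone} to the image service")

    def snapshot_flow(vm, shelving=False):
        """Ordering visible above: optional power off, snapshot, linked clone, upload."""
        if shelving:
            power_off(vm)               # shelve powers the VM off first
        snap = create_snapshot(vm)      # fixes a point-in-time disk state
        clone = linked_clone(vm, snap)  # cheap copy backed by the snapshot delta
        upload_image(clone)             # becomes the shelved/snapshot image

    snapshot_flow("e7daad63-c802-4a86-bead-7e849064ed61", shelving=True)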
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.094065] env[62510]: DEBUG nova.compute.manager [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1700.097826] env[62510]: DEBUG oslo_concurrency.lockutils [None req-74f9e27a-caad-46f9-b6d0-6aa789f4e49f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.025s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1700.101134] env[62510]: DEBUG oslo_concurrency.lockutils [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.712s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1700.101134] env[62510]: DEBUG nova.objects.instance [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lazy-loading 'pci_requests' on Instance uuid 83fa0d32-18ee-401d-af0b-a0adb538e5f4 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1700.125411] env[62510]: INFO nova.scheduler.client.report [None req-74f9e27a-caad-46f9-b6d0-6aa789f4e49f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Deleted allocations for instance 0d27da5c-20f3-4df1-86d2-036c904fd657 [ 1700.255829] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24fee006-af04-4d12-b271-1c2c97912e87 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1700.265750] env[62510]: DEBUG oslo_vmware.api [None req-a9fa7169-0d2e-47de-8739-7dc627eb07d1 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768979, 'name': CloneVM_Task} progress is 94%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.397675] env[62510]: DEBUG oslo_vmware.api [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Task: {'id': task-1768980, 'name': Rename_Task, 'duration_secs': 0.309705} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1700.397954] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1700.398233] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f5edc92e-204e-463a-96d2-13e415069e81 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.405700] env[62510]: DEBUG oslo_vmware.api [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Waiting for the task: (returnval){ [ 1700.405700] env[62510]: value = "task-1768982" [ 1700.405700] env[62510]: _type = "Task" [ 1700.405700] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1700.422108] env[62510]: DEBUG oslo_vmware.api [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Task: {'id': task-1768982, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.496016] env[62510]: DEBUG oslo_vmware.api [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768981, 'name': CloneVM_Task} progress is 94%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.607756] env[62510]: DEBUG nova.objects.instance [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lazy-loading 'numa_topology' on Instance uuid 83fa0d32-18ee-401d-af0b-a0adb538e5f4 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1700.633639] env[62510]: DEBUG oslo_concurrency.lockutils [None req-74f9e27a-caad-46f9-b6d0-6aa789f4e49f tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "0d27da5c-20f3-4df1-86d2-036c904fd657" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.764s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1700.770281] env[62510]: DEBUG oslo_vmware.api [None req-a9fa7169-0d2e-47de-8739-7dc627eb07d1 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768979, 'name': CloneVM_Task} progress is 95%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.916722] env[62510]: DEBUG oslo_vmware.api [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Task: {'id': task-1768982, 'name': PowerOnVM_Task, 'duration_secs': 0.492381} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1700.917060] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1700.917280] env[62510]: INFO nova.compute.manager [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Took 7.38 seconds to spawn the instance on the hypervisor. [ 1700.917518] env[62510]: DEBUG nova.compute.manager [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1700.918301] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08c35688-2186-40e3-b9b4-e5f4621d1b33 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.997572] env[62510]: DEBUG oslo_vmware.api [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768981, 'name': CloneVM_Task} progress is 95%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.110764] env[62510]: DEBUG nova.compute.manager [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1701.111535] env[62510]: INFO nova.compute.claims [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1701.134447] env[62510]: DEBUG nova.virt.hardware [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1701.134715] env[62510]: DEBUG nova.virt.hardware [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1701.134887] env[62510]: DEBUG nova.virt.hardware [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1701.135087] env[62510]: DEBUG nova.virt.hardware [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1701.135235] env[62510]: DEBUG nova.virt.hardware [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1701.135381] env[62510]: DEBUG nova.virt.hardware [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1701.135595] env[62510]: DEBUG nova.virt.hardware [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1701.135759] env[62510]: DEBUG nova.virt.hardware [None req-8f429914-1354-4d50-9676-766c9fa94fc7 
tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1701.135977] env[62510]: DEBUG nova.virt.hardware [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1701.136179] env[62510]: DEBUG nova.virt.hardware [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1701.136422] env[62510]: DEBUG nova.virt.hardware [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1701.137898] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea4fb152-cdc4-4cd6-a613-ac1590d8f2b1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.146790] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-755425f0-4e29-4d7a-946a-d6c73d0c8e1e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.267379] env[62510]: DEBUG oslo_vmware.api [None req-a9fa7169-0d2e-47de-8739-7dc627eb07d1 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1768979, 'name': CloneVM_Task, 'duration_secs': 1.180706} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1701.267739] env[62510]: INFO nova.virt.vmwareapi.vmops [None req-a9fa7169-0d2e-47de-8739-7dc627eb07d1 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Created linked-clone VM from snapshot [ 1701.268614] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac769edd-145d-4b54-81b8-43a186bfbbda {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.277524] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-a9fa7169-0d2e-47de-8739-7dc627eb07d1 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Uploading image f6223ac6-801a-4cf1-b252-449e54e92fe4 {{(pid=62510) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1701.299403] env[62510]: DEBUG oslo_vmware.rw_handles [None req-a9fa7169-0d2e-47de-8739-7dc627eb07d1 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1701.299403] env[62510]: value = "vm-367384" [ 1701.299403] env[62510]: _type = "VirtualMachine" [ 1701.299403] env[62510]: }. 
{{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1701.299687] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-e75d28f7-9344-462f-b319-957d73cbe1cd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.308321] env[62510]: DEBUG oslo_vmware.rw_handles [None req-a9fa7169-0d2e-47de-8739-7dc627eb07d1 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lease: (returnval){ [ 1701.308321] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52219ce4-d29c-3591-32a8-5925d0412f83" [ 1701.308321] env[62510]: _type = "HttpNfcLease" [ 1701.308321] env[62510]: } obtained for exporting VM: (result){ [ 1701.308321] env[62510]: value = "vm-367384" [ 1701.308321] env[62510]: _type = "VirtualMachine" [ 1701.308321] env[62510]: }. {{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1701.308646] env[62510]: DEBUG oslo_vmware.api [None req-a9fa7169-0d2e-47de-8739-7dc627eb07d1 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the lease: (returnval){ [ 1701.308646] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52219ce4-d29c-3591-32a8-5925d0412f83" [ 1701.308646] env[62510]: _type = "HttpNfcLease" [ 1701.308646] env[62510]: } to be ready. {{(pid=62510) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1701.316555] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1701.316555] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52219ce4-d29c-3591-32a8-5925d0412f83" [ 1701.316555] env[62510]: _type = "HttpNfcLease" [ 1701.316555] env[62510]: } is initializing. {{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1701.439691] env[62510]: INFO nova.compute.manager [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Took 54.59 seconds to build instance. [ 1701.497445] env[62510]: DEBUG oslo_vmware.api [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768981, 'name': CloneVM_Task, 'duration_secs': 1.323051} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1701.497745] env[62510]: INFO nova.virt.vmwareapi.vmops [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Created linked-clone VM from snapshot [ 1701.498547] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92530786-f829-4582-8486-a0aa5539296b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.508647] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Uploading image 37ce35c0-4c91-45fb-b27b-04201e3f0d27 {{(pid=62510) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1701.514173] env[62510]: DEBUG nova.compute.manager [req-0d54f2d6-2668-4eb0-9286-4b4be2c61768 req-ccbd79ab-392f-4623-8ab9-97ceb19404f4 service nova] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Received event network-vif-plugged-c065c584-69ab-4d52-8de2-906db2d8c438 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1701.514489] env[62510]: DEBUG oslo_concurrency.lockutils [req-0d54f2d6-2668-4eb0-9286-4b4be2c61768 req-ccbd79ab-392f-4623-8ab9-97ceb19404f4 service nova] Acquiring lock "1d644c4f-1fd4-4251-aeef-5777d3f4b94c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1701.514705] env[62510]: DEBUG oslo_concurrency.lockutils [req-0d54f2d6-2668-4eb0-9286-4b4be2c61768 req-ccbd79ab-392f-4623-8ab9-97ceb19404f4 service nova] Lock "1d644c4f-1fd4-4251-aeef-5777d3f4b94c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1701.514873] env[62510]: DEBUG oslo_concurrency.lockutils [req-0d54f2d6-2668-4eb0-9286-4b4be2c61768 req-ccbd79ab-392f-4623-8ab9-97ceb19404f4 service nova] Lock "1d644c4f-1fd4-4251-aeef-5777d3f4b94c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1701.515076] env[62510]: DEBUG nova.compute.manager [req-0d54f2d6-2668-4eb0-9286-4b4be2c61768 req-ccbd79ab-392f-4623-8ab9-97ceb19404f4 service nova] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] No waiting events found dispatching network-vif-plugged-c065c584-69ab-4d52-8de2-906db2d8c438 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1701.515258] env[62510]: WARNING nova.compute.manager [req-0d54f2d6-2668-4eb0-9286-4b4be2c61768 req-ccbd79ab-392f-4623-8ab9-97ceb19404f4 service nova] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Received unexpected event network-vif-plugged-c065c584-69ab-4d52-8de2-906db2d8c438 for instance with vm_state building and task_state spawning. 
[ 1701.532584] env[62510]: DEBUG oslo_vmware.rw_handles [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1701.532584] env[62510]: value = "vm-367385" [ 1701.532584] env[62510]: _type = "VirtualMachine" [ 1701.532584] env[62510]: }. {{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1701.533105] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-c00c4008-6f5e-48be-b28e-4c8e617f281f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.541547] env[62510]: DEBUG oslo_vmware.rw_handles [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Lease: (returnval){ [ 1701.541547] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52d92d51-42fb-1b2f-598e-4cd4907f1c27" [ 1701.541547] env[62510]: _type = "HttpNfcLease" [ 1701.541547] env[62510]: } obtained for exporting VM: (result){ [ 1701.541547] env[62510]: value = "vm-367385" [ 1701.541547] env[62510]: _type = "VirtualMachine" [ 1701.541547] env[62510]: }. {{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1701.541777] env[62510]: DEBUG oslo_vmware.api [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for the lease: (returnval){ [ 1701.541777] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52d92d51-42fb-1b2f-598e-4cd4907f1c27" [ 1701.541777] env[62510]: _type = "HttpNfcLease" [ 1701.541777] env[62510]: } to be ready. {{(pid=62510) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1701.548226] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1701.548226] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52d92d51-42fb-1b2f-598e-4cd4907f1c27" [ 1701.548226] env[62510]: _type = "HttpNfcLease" [ 1701.548226] env[62510]: } is initializing. {{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1701.595311] env[62510]: DEBUG nova.network.neutron [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Successfully updated port: c065c584-69ab-4d52-8de2-906db2d8c438 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1701.817216] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1701.817216] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52219ce4-d29c-3591-32a8-5925d0412f83" [ 1701.817216] env[62510]: _type = "HttpNfcLease" [ 1701.817216] env[62510]: } is ready. 
{{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1701.817486] env[62510]: DEBUG oslo_vmware.rw_handles [None req-a9fa7169-0d2e-47de-8739-7dc627eb07d1 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1701.817486] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52219ce4-d29c-3591-32a8-5925d0412f83" [ 1701.817486] env[62510]: _type = "HttpNfcLease" [ 1701.817486] env[62510]: }. {{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1701.818276] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ec36449-7b1a-468d-b370-791525a25ef5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.826029] env[62510]: DEBUG oslo_vmware.rw_handles [None req-a9fa7169-0d2e-47de-8739-7dc627eb07d1 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528b0a12-787b-984a-5b85-6467e01a70ce/disk-0.vmdk from lease info. {{(pid=62510) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1701.826215] env[62510]: DEBUG oslo_vmware.rw_handles [None req-a9fa7169-0d2e-47de-8739-7dc627eb07d1 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528b0a12-787b-984a-5b85-6467e01a70ce/disk-0.vmdk for reading. {{(pid=62510) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1701.886740] env[62510]: DEBUG oslo_concurrency.lockutils [None req-318b95aa-5cee-4e0a-9fb4-d70b6d6dce35 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquiring lock "ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1701.887150] env[62510]: DEBUG oslo_concurrency.lockutils [None req-318b95aa-5cee-4e0a-9fb4-d70b6d6dce35 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1701.887374] env[62510]: DEBUG oslo_concurrency.lockutils [None req-318b95aa-5cee-4e0a-9fb4-d70b6d6dce35 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquiring lock "ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1701.887561] env[62510]: DEBUG oslo_concurrency.lockutils [None req-318b95aa-5cee-4e0a-9fb4-d70b6d6dce35 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1701.887743] env[62510]: DEBUG oslo_concurrency.lockutils [None req-318b95aa-5cee-4e0a-9fb4-d70b6d6dce35 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1701.891143] env[62510]: INFO nova.compute.manager [None req-318b95aa-5cee-4e0a-9fb4-d70b6d6dce35 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Terminating instance [ 1701.941500] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ee56f651-6d1d-4918-8024-fbf579b7c5a2 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Lock "c8e69231-2786-47ac-9a44-c194088b8079" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.097s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1702.025625] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-925206ec-e5a3-45bc-9f91-f18f01398725 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.052297] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1702.052297] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52d92d51-42fb-1b2f-598e-4cd4907f1c27" [ 1702.052297] env[62510]: _type = "HttpNfcLease" [ 1702.052297] env[62510]: } is ready. {{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1702.052608] env[62510]: DEBUG oslo_vmware.rw_handles [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1702.052608] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52d92d51-42fb-1b2f-598e-4cd4907f1c27" [ 1702.052608] env[62510]: _type = "HttpNfcLease" [ 1702.052608] env[62510]: }. {{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1702.053751] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75ece41f-f987-4818-bdb1-f2174f711558 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.071136] env[62510]: DEBUG oslo_vmware.rw_handles [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529a3b75-30c4-0262-2583-5783611b8fba/disk-0.vmdk from lease info. 
{{(pid=62510) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1702.071334] env[62510]: DEBUG oslo_vmware.rw_handles [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529a3b75-30c4-0262-2583-5783611b8fba/disk-0.vmdk for reading. {{(pid=62510) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1702.129723] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Acquiring lock "refresh_cache-1d644c4f-1fd4-4251-aeef-5777d3f4b94c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1702.129912] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Acquired lock "refresh_cache-1d644c4f-1fd4-4251-aeef-5777d3f4b94c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1702.130690] env[62510]: DEBUG nova.network.neutron [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1702.212087] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-bed62502-b037-426a-8784-d9a663d7700e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.396560] env[62510]: DEBUG nova.compute.manager [None req-318b95aa-5cee-4e0a-9fb4-d70b6d6dce35 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1702.397979] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-318b95aa-5cee-4e0a-9fb4-d70b6d6dce35 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1702.399112] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-491aeaee-c0fc-4e0f-88e1-382ce31332d0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.409122] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-318b95aa-5cee-4e0a-9fb4-d70b6d6dce35 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1702.409481] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1d458d7d-aebd-4a6b-9caf-834b5763d4b0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.424526] env[62510]: DEBUG oslo_vmware.api [None req-318b95aa-5cee-4e0a-9fb4-d70b6d6dce35 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1702.424526] env[62510]: value = "task-1768985" [ 1702.424526] env[62510]: _type = "Task" [ 1702.424526] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1702.443215] env[62510]: DEBUG oslo_vmware.api [None req-318b95aa-5cee-4e0a-9fb4-d70b6d6dce35 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768985, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.619558] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40b92b75-33ae-4d67-a3e6-19170842b638 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.630999] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4edc5531-55fc-487e-8fd5-b2258500eef8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.673282] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceed2a22-d863-444c-8451-b411f53e12af {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.682662] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf962a12-71d4-49dd-82e2-e8eea4d6b27c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.699850] env[62510]: DEBUG nova.compute.provider_tree [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1702.702941] env[62510]: DEBUG nova.network.neutron [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Instance cache missing network info. 
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1702.881580] env[62510]: DEBUG nova.network.neutron [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Updating instance_info_cache with network_info: [{"id": "c065c584-69ab-4d52-8de2-906db2d8c438", "address": "fa:16:3e:f1:68:4a", "network": {"id": "923366dd-0de1-4025-aec7-cc37f879bd7e", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1090424887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a7b7083f97b844289b07cf4af3bf3765", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "706c9762-1cf8-4770-897d-377d0d927773", "external-id": "nsx-vlan-transportzone-402", "segmentation_id": 402, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc065c584-69", "ovs_interfaceid": "c065c584-69ab-4d52-8de2-906db2d8c438", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1702.937716] env[62510]: DEBUG oslo_vmware.api [None req-318b95aa-5cee-4e0a-9fb4-d70b6d6dce35 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768985, 'name': PowerOffVM_Task, 'duration_secs': 0.420535} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1702.940694] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-318b95aa-5cee-4e0a-9fb4-d70b6d6dce35 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1702.940694] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-318b95aa-5cee-4e0a-9fb4-d70b6d6dce35 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1702.940694] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-76d6e8e9-9f78-4f59-82ac-7e1ca276c235 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.986223] env[62510]: DEBUG nova.compute.manager [req-b9f97359-3148-4861-8faf-b526e6273c79 req-928002c6-886e-42e0-8da5-edec990e27c1 service nova] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Received event network-changed-db18d253-3621-4972-a6c9-1f82a650ccb0 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1702.986599] env[62510]: DEBUG nova.compute.manager [req-b9f97359-3148-4861-8faf-b526e6273c79 req-928002c6-886e-42e0-8da5-edec990e27c1 service nova] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Refreshing instance network info cache due to event network-changed-db18d253-3621-4972-a6c9-1f82a650ccb0. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1702.986933] env[62510]: DEBUG oslo_concurrency.lockutils [req-b9f97359-3148-4861-8faf-b526e6273c79 req-928002c6-886e-42e0-8da5-edec990e27c1 service nova] Acquiring lock "refresh_cache-c8e69231-2786-47ac-9a44-c194088b8079" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1702.987253] env[62510]: DEBUG oslo_concurrency.lockutils [req-b9f97359-3148-4861-8faf-b526e6273c79 req-928002c6-886e-42e0-8da5-edec990e27c1 service nova] Acquired lock "refresh_cache-c8e69231-2786-47ac-9a44-c194088b8079" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1702.987372] env[62510]: DEBUG nova.network.neutron [req-b9f97359-3148-4861-8faf-b526e6273c79 req-928002c6-886e-42e0-8da5-edec990e27c1 service nova] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Refreshing network info cache for port db18d253-3621-4972-a6c9-1f82a650ccb0 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1703.207396] env[62510]: DEBUG nova.scheduler.client.report [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1703.385562] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Releasing lock "refresh_cache-1d644c4f-1fd4-4251-aeef-5777d3f4b94c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1703.385898] env[62510]: DEBUG nova.compute.manager [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Instance network_info: |[{"id": "c065c584-69ab-4d52-8de2-906db2d8c438", "address": "fa:16:3e:f1:68:4a", "network": {"id": "923366dd-0de1-4025-aec7-cc37f879bd7e", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1090424887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a7b7083f97b844289b07cf4af3bf3765", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "706c9762-1cf8-4770-897d-377d0d927773", "external-id": "nsx-vlan-transportzone-402", "segmentation_id": 402, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc065c584-69", "ovs_interfaceid": "c065c584-69ab-4d52-8de2-906db2d8c438", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1703.386471] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f1:68:4a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '706c9762-1cf8-4770-897d-377d0d927773', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c065c584-69ab-4d52-8de2-906db2d8c438', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1703.394231] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Creating folder: Project (a7b7083f97b844289b07cf4af3bf3765). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1703.394636] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0bd1244a-3a17-40d9-8ee2-c114b9c70b03 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.409163] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Created folder: Project (a7b7083f97b844289b07cf4af3bf3765) in parent group-v367197. 
[ 1703.409633] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Creating folder: Instances. Parent ref: group-v367386. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1703.410091] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d8aa23bd-d8ea-4735-8f73-85e04fbbcaa7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.425246] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Created folder: Instances in parent group-v367386. [ 1703.425802] env[62510]: DEBUG oslo.service.loopingcall [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1703.426150] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1703.426571] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-89b26ef4-ed2b-4208-8cab-3f5d9c2c1d87 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.456122] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1703.456122] env[62510]: value = "task-1768989" [ 1703.456122] env[62510]: _type = "Task" [ 1703.456122] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1703.467614] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768989, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.651658] env[62510]: DEBUG nova.compute.manager [req-7f778233-4e8a-4185-beb9-a435eca243df req-96c83a29-2f50-4a7f-948b-16db007c4390 service nova] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Received event network-changed-c065c584-69ab-4d52-8de2-906db2d8c438 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1703.653368] env[62510]: DEBUG nova.compute.manager [req-7f778233-4e8a-4185-beb9-a435eca243df req-96c83a29-2f50-4a7f-948b-16db007c4390 service nova] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Refreshing instance network info cache due to event network-changed-c065c584-69ab-4d52-8de2-906db2d8c438. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1703.653601] env[62510]: DEBUG oslo_concurrency.lockutils [req-7f778233-4e8a-4185-beb9-a435eca243df req-96c83a29-2f50-4a7f-948b-16db007c4390 service nova] Acquiring lock "refresh_cache-1d644c4f-1fd4-4251-aeef-5777d3f4b94c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1703.653802] env[62510]: DEBUG oslo_concurrency.lockutils [req-7f778233-4e8a-4185-beb9-a435eca243df req-96c83a29-2f50-4a7f-948b-16db007c4390 service nova] Acquired lock "refresh_cache-1d644c4f-1fd4-4251-aeef-5777d3f4b94c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1703.654012] env[62510]: DEBUG nova.network.neutron [req-7f778233-4e8a-4185-beb9-a435eca243df req-96c83a29-2f50-4a7f-948b-16db007c4390 service nova] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Refreshing network info cache for port c065c584-69ab-4d52-8de2-906db2d8c438 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1703.713283] env[62510]: DEBUG oslo_concurrency.lockutils [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.612s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1703.716441] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 39.625s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1703.716813] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1703.717046] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62510) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1703.717599] env[62510]: DEBUG oslo_concurrency.lockutils [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.961s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1703.719507] env[62510]: INFO nova.compute.claims [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1703.722959] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e154f1b8-3f9e-4a6c-9b74-81a393b9b557 {{(pid=62510) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.738019] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3f54ca3-ef09-458d-a29a-5d1f4c763746 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.756480] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33cc7a70-dda2-4e3a-854e-8698b75b4db0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.760286] env[62510]: INFO nova.network.neutron [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Updating port d6ee81d1-3abc-4d5e-a8ca-658407cbd553 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1703.773845] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2a1c20d-541c-4452-8292-bc67f63537bb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.823735] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178590MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=62510) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1703.824029] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1703.907040] env[62510]: DEBUG nova.network.neutron [req-b9f97359-3148-4861-8faf-b526e6273c79 req-928002c6-886e-42e0-8da5-edec990e27c1 service nova] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Updated VIF entry in instance network info cache for port db18d253-3621-4972-a6c9-1f82a650ccb0. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1703.907499] env[62510]: DEBUG nova.network.neutron [req-b9f97359-3148-4861-8faf-b526e6273c79 req-928002c6-886e-42e0-8da5-edec990e27c1 service nova] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Updating instance_info_cache with network_info: [{"id": "db18d253-3621-4972-a6c9-1f82a650ccb0", "address": "fa:16:3e:80:65:6a", "network": {"id": "28802dea-043a-4d1b-b938-f1075bb2596a", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-760212540-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.141", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6cfd45be082f40eb9ccfd136feeabe77", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5bd281ed-ae39-485f-90ee-4ee27994b5b0", "external-id": "nsx-vlan-transportzone-305", "segmentation_id": 305, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb18d253-36", "ovs_interfaceid": "db18d253-3621-4972-a6c9-1f82a650ccb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1703.966210] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768989, 'name': CreateVM_Task, 'duration_secs': 0.49356} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1703.966408] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1703.967209] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1703.967434] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1703.967858] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1703.968197] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-905a654d-0691-44f8-b2ad-c20270e69196 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.973693] env[62510]: DEBUG oslo_vmware.api [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Waiting for the task: (returnval){ [ 1703.973693] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5279d6d8-b9ce-e218-2c6c-0ef7f515bccc" [ 1703.973693] env[62510]: _type = "Task" [ 1703.973693] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1703.982267] env[62510]: DEBUG oslo_vmware.api [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5279d6d8-b9ce-e218-2c6c-0ef7f515bccc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.410902] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-318b95aa-5cee-4e0a-9fb4-d70b6d6dce35 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1704.411319] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-318b95aa-5cee-4e0a-9fb4-d70b6d6dce35 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1704.411869] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-318b95aa-5cee-4e0a-9fb4-d70b6d6dce35 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Deleting the datastore file [datastore1] ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1704.412379] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dcb24d1c-1244-48f0-8154-dd7d3ea732af {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.415054] env[62510]: DEBUG oslo_concurrency.lockutils [req-b9f97359-3148-4861-8faf-b526e6273c79 req-928002c6-886e-42e0-8da5-edec990e27c1 service nova] Releasing lock "refresh_cache-c8e69231-2786-47ac-9a44-c194088b8079" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1704.422843] env[62510]: DEBUG oslo_vmware.api [None req-318b95aa-5cee-4e0a-9fb4-d70b6d6dce35 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1704.422843] env[62510]: value = "task-1768990" [ 1704.422843] env[62510]: _type = "Task" [ 1704.422843] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.436586] env[62510]: DEBUG oslo_vmware.api [None req-318b95aa-5cee-4e0a-9fb4-d70b6d6dce35 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768990, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.485226] env[62510]: DEBUG oslo_vmware.api [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5279d6d8-b9ce-e218-2c6c-0ef7f515bccc, 'name': SearchDatastore_Task, 'duration_secs': 0.013869} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.485561] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1704.485837] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1704.486107] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1704.486331] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1704.486547] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1704.486831] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae74bcdc-b344-4e13-af14-ce029d737698 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.489597] env[62510]: DEBUG nova.network.neutron [req-7f778233-4e8a-4185-beb9-a435eca243df req-96c83a29-2f50-4a7f-948b-16db007c4390 service nova] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Updated VIF entry in instance network info cache for port c065c584-69ab-4d52-8de2-906db2d8c438. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1704.489934] env[62510]: DEBUG nova.network.neutron [req-7f778233-4e8a-4185-beb9-a435eca243df req-96c83a29-2f50-4a7f-948b-16db007c4390 service nova] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Updating instance_info_cache with network_info: [{"id": "c065c584-69ab-4d52-8de2-906db2d8c438", "address": "fa:16:3e:f1:68:4a", "network": {"id": "923366dd-0de1-4025-aec7-cc37f879bd7e", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1090424887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a7b7083f97b844289b07cf4af3bf3765", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "706c9762-1cf8-4770-897d-377d0d927773", "external-id": "nsx-vlan-transportzone-402", "segmentation_id": 402, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc065c584-69", "ovs_interfaceid": "c065c584-69ab-4d52-8de2-906db2d8c438", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1704.501779] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1704.501779] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1704.502402] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d75d34fc-3f0d-45b5-b5b2-43f31e8ca15f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.509741] env[62510]: DEBUG oslo_vmware.api [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Waiting for the task: (returnval){ [ 1704.509741] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52970792-802f-e997-106b-c9082308df45" [ 1704.509741] env[62510]: _type = "Task" [ 1704.509741] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.519515] env[62510]: DEBUG oslo_vmware.api [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52970792-802f-e997-106b-c9082308df45, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.936747] env[62510]: DEBUG oslo_vmware.api [None req-318b95aa-5cee-4e0a-9fb4-d70b6d6dce35 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1768990, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.261497} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.937039] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-318b95aa-5cee-4e0a-9fb4-d70b6d6dce35 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1704.937262] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-318b95aa-5cee-4e0a-9fb4-d70b6d6dce35 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1704.937415] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-318b95aa-5cee-4e0a-9fb4-d70b6d6dce35 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1704.937589] env[62510]: INFO nova.compute.manager [None req-318b95aa-5cee-4e0a-9fb4-d70b6d6dce35 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Took 2.54 seconds to destroy the instance on the hypervisor. [ 1704.937831] env[62510]: DEBUG oslo.service.loopingcall [None req-318b95aa-5cee-4e0a-9fb4-d70b6d6dce35 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1704.941051] env[62510]: DEBUG nova.compute.manager [-] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1704.941170] env[62510]: DEBUG nova.network.neutron [-] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1704.992723] env[62510]: DEBUG oslo_concurrency.lockutils [req-7f778233-4e8a-4185-beb9-a435eca243df req-96c83a29-2f50-4a7f-948b-16db007c4390 service nova] Releasing lock "refresh_cache-1d644c4f-1fd4-4251-aeef-5777d3f4b94c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1705.022859] env[62510]: DEBUG oslo_vmware.api [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52970792-802f-e997-106b-c9082308df45, 'name': SearchDatastore_Task, 'duration_secs': 0.014677} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1705.026567] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a60dd66-62a2-441e-848b-4e16aa255eb5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.032951] env[62510]: DEBUG oslo_vmware.api [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Waiting for the task: (returnval){ [ 1705.032951] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52c05b66-0eb8-adf0-c0d3-76633aef5305" [ 1705.032951] env[62510]: _type = "Task" [ 1705.032951] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.043615] env[62510]: DEBUG oslo_vmware.api [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52c05b66-0eb8-adf0-c0d3-76633aef5305, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.134681] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb31b41a-1953-4ac5-93e5-64c11f63630f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.143581] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-248ed553-0cec-44e8-9024-db2090261ac3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.178883] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3552ad5b-a939-4b50-af4e-783eaf6b5f06 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.188352] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40ab5837-d2a5-4df4-85d4-035cda7205e8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.204974] env[62510]: DEBUG nova.compute.provider_tree [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1705.271115] env[62510]: DEBUG nova.compute.manager [req-17f98da5-47ec-4ef9-9002-7eadd1b4954f req-79402fb0-30e3-4daa-98c2-9815d2e41ad4 service nova] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Received event network-vif-plugged-d6ee81d1-3abc-4d5e-a8ca-658407cbd553 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1705.271115] env[62510]: DEBUG oslo_concurrency.lockutils [req-17f98da5-47ec-4ef9-9002-7eadd1b4954f req-79402fb0-30e3-4daa-98c2-9815d2e41ad4 service nova] Acquiring lock "83fa0d32-18ee-401d-af0b-a0adb538e5f4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1705.271115] env[62510]: DEBUG oslo_concurrency.lockutils [req-17f98da5-47ec-4ef9-9002-7eadd1b4954f req-79402fb0-30e3-4daa-98c2-9815d2e41ad4 service nova] Lock "83fa0d32-18ee-401d-af0b-a0adb538e5f4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1705.271115] env[62510]: DEBUG oslo_concurrency.lockutils [req-17f98da5-47ec-4ef9-9002-7eadd1b4954f req-79402fb0-30e3-4daa-98c2-9815d2e41ad4 service nova] Lock "83fa0d32-18ee-401d-af0b-a0adb538e5f4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1705.271115] env[62510]: DEBUG nova.compute.manager [req-17f98da5-47ec-4ef9-9002-7eadd1b4954f req-79402fb0-30e3-4daa-98c2-9815d2e41ad4 service nova] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] No waiting events found dispatching network-vif-plugged-d6ee81d1-3abc-4d5e-a8ca-658407cbd553 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1705.271115] env[62510]: WARNING nova.compute.manager [req-17f98da5-47ec-4ef9-9002-7eadd1b4954f req-79402fb0-30e3-4daa-98c2-9815d2e41ad4 service nova] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Received unexpected event network-vif-plugged-d6ee81d1-3abc-4d5e-a8ca-658407cbd553 for instance with vm_state shelved_offloaded and task_state spawning. [ 1705.324545] env[62510]: DEBUG oslo_concurrency.lockutils [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquiring lock "refresh_cache-83fa0d32-18ee-401d-af0b-a0adb538e5f4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1705.324810] env[62510]: DEBUG oslo_concurrency.lockutils [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquired lock "refresh_cache-83fa0d32-18ee-401d-af0b-a0adb538e5f4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1705.325032] env[62510]: DEBUG nova.network.neutron [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1705.545955] env[62510]: DEBUG oslo_vmware.api [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52c05b66-0eb8-adf0-c0d3-76633aef5305, 'name': SearchDatastore_Task, 'duration_secs': 0.011201} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1705.546420] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1705.546679] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 1d644c4f-1fd4-4251-aeef-5777d3f4b94c/1d644c4f-1fd4-4251-aeef-5777d3f4b94c.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1705.547175] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-65e08ab9-327f-43b2-9ddf-f3d701be69e5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.555520] env[62510]: DEBUG oslo_vmware.api [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Waiting for the task: (returnval){ [ 1705.555520] env[62510]: value = "task-1768991" [ 1705.555520] env[62510]: _type = "Task" [ 1705.555520] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.566265] env[62510]: DEBUG oslo_vmware.api [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Task: {'id': task-1768991, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.708423] env[62510]: DEBUG nova.scheduler.client.report [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1705.734612] env[62510]: DEBUG nova.network.neutron [-] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1705.754325] env[62510]: DEBUG nova.compute.manager [req-1235f316-c0cf-4d9e-aa31-02f41a4afc8d req-6e9a8818-6284-4e44-b9c7-aa5eabd96997 service nova] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Received event network-vif-deleted-1504f2f8-ef63-437a-b979-f2a95995a28f {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1705.754325] env[62510]: INFO nova.compute.manager [req-1235f316-c0cf-4d9e-aa31-02f41a4afc8d req-6e9a8818-6284-4e44-b9c7-aa5eabd96997 service nova] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Neutron deleted interface 1504f2f8-ef63-437a-b979-f2a95995a28f; detaching it from the instance and deleting it from the info cache [ 1705.754325] env[62510]: DEBUG nova.network.neutron [req-1235f316-c0cf-4d9e-aa31-02f41a4afc8d req-6e9a8818-6284-4e44-b9c7-aa5eabd96997 service nova] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1706.067724] env[62510]: DEBUG oslo_vmware.api [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Task: {'id': task-1768991, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.083956] env[62510]: DEBUG nova.network.neutron [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Updating instance_info_cache with network_info: [{"id": "d6ee81d1-3abc-4d5e-a8ca-658407cbd553", "address": "fa:16:3e:45:ab:75", "network": {"id": "3958d418-1b64-4598-975c-02b13c976ce5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1692593298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.217", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3109fa7889c64dfda2117d4cd58aa528", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ba56987-9dc3-4c76-a4e2-942b05355bdb", "external-id": "nsx-vlan-transportzone-698", "segmentation_id": 698, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6ee81d1-3a", "ovs_interfaceid": "d6ee81d1-3abc-4d5e-a8ca-658407cbd553", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1706.214649] env[62510]: DEBUG oslo_concurrency.lockutils [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.497s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1706.215216] env[62510]: DEBUG nova.compute.manager [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1706.218051] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a014305d-2438-47a2-83ea-eca5b8202f1c tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.299s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1706.218301] env[62510]: DEBUG nova.objects.instance [None req-a014305d-2438-47a2-83ea-eca5b8202f1c tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Lazy-loading 'resources' on Instance uuid 8bbafd7f-cdd1-4246-a509-2f97a6f78497 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1706.238901] env[62510]: INFO nova.compute.manager [-] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Took 1.30 seconds to deallocate network for instance. 
[ 1706.257375] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8a7cb351-3aef-4f44-bb25-64e1b001aa0b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.269390] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ed045c1-d0e3-4423-bb8a-2d21ad425c24 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.308030] env[62510]: DEBUG nova.compute.manager [req-1235f316-c0cf-4d9e-aa31-02f41a4afc8d req-6e9a8818-6284-4e44-b9c7-aa5eabd96997 service nova] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Detach interface failed, port_id=1504f2f8-ef63-437a-b979-f2a95995a28f, reason: Instance ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3 could not be found. {{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1706.567192] env[62510]: DEBUG oslo_vmware.api [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Task: {'id': task-1768991, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.611019} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1706.567505] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 1d644c4f-1fd4-4251-aeef-5777d3f4b94c/1d644c4f-1fd4-4251-aeef-5777d3f4b94c.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1706.567706] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1706.567958] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-614d0a47-9023-4360-b305-6b05ec05398d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.576815] env[62510]: DEBUG oslo_vmware.api [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Waiting for the task: (returnval){ [ 1706.576815] env[62510]: value = "task-1768992" [ 1706.576815] env[62510]: _type = "Task" [ 1706.576815] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1706.588372] env[62510]: DEBUG oslo_concurrency.lockutils [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Releasing lock "refresh_cache-83fa0d32-18ee-401d-af0b-a0adb538e5f4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1706.590597] env[62510]: DEBUG oslo_vmware.api [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Task: {'id': task-1768992, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.724723] env[62510]: DEBUG nova.compute.utils [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1706.727580] env[62510]: DEBUG nova.compute.manager [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1706.727788] env[62510]: DEBUG nova.network.neutron [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1706.746052] env[62510]: DEBUG oslo_concurrency.lockutils [None req-318b95aa-5cee-4e0a-9fb4-d70b6d6dce35 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1706.779521] env[62510]: DEBUG nova.policy [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '97a7f1ca55d549a3985e95b6bbc665f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '94a46473611d4b22be7c66c909d1b348', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1707.090458] env[62510]: DEBUG oslo_vmware.api [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Task: {'id': task-1768992, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.14502} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1707.093339] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1707.094387] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc67c210-160d-41ed-a6e8-5bbe91844aeb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.120065] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] 1d644c4f-1fd4-4251-aeef-5777d3f4b94c/1d644c4f-1fd4-4251-aeef-5777d3f4b94c.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1707.122912] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0ec11285-c61e-4465-821e-8dbd015df4af {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.149612] env[62510]: DEBUG oslo_vmware.api [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Waiting for the task: (returnval){ [ 1707.149612] env[62510]: value = "task-1768993" [ 1707.149612] env[62510]: _type = "Task" [ 1707.149612] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1707.161511] env[62510]: DEBUG oslo_vmware.api [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Task: {'id': task-1768993, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.172881] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9060ee3-714f-440b-a9e0-5e3b56b2ff55 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.181438] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69ad1028-c7b1-40a1-beeb-48ecd5e1e9e1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.214730] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55c01970-f659-4732-8f35-ade27477117c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.223782] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36f2c112-257c-4030-bc56-c06aacb97b9a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.245622] env[62510]: DEBUG nova.compute.manager [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1707.249170] env[62510]: DEBUG nova.compute.provider_tree [None req-a014305d-2438-47a2-83ea-eca5b8202f1c tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1707.254559] env[62510]: DEBUG nova.network.neutron [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Successfully created port: 300bfb1b-5d55-410a-b114-cb4043425263 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1707.376555] env[62510]: DEBUG nova.compute.manager [req-fcc83b9f-baab-49a7-8c5c-47c909ddf675 req-8590b1c6-8687-4645-a9e1-cfd17089fa60 service nova] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Received event network-changed-d6ee81d1-3abc-4d5e-a8ca-658407cbd553 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1707.376830] env[62510]: DEBUG nova.compute.manager [req-fcc83b9f-baab-49a7-8c5c-47c909ddf675 req-8590b1c6-8687-4645-a9e1-cfd17089fa60 service nova] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Refreshing instance network info cache due to event network-changed-d6ee81d1-3abc-4d5e-a8ca-658407cbd553. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1707.377253] env[62510]: DEBUG oslo_concurrency.lockutils [req-fcc83b9f-baab-49a7-8c5c-47c909ddf675 req-8590b1c6-8687-4645-a9e1-cfd17089fa60 service nova] Acquiring lock "refresh_cache-83fa0d32-18ee-401d-af0b-a0adb538e5f4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1707.377468] env[62510]: DEBUG oslo_concurrency.lockutils [req-fcc83b9f-baab-49a7-8c5c-47c909ddf675 req-8590b1c6-8687-4645-a9e1-cfd17089fa60 service nova] Acquired lock "refresh_cache-83fa0d32-18ee-401d-af0b-a0adb538e5f4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1707.377688] env[62510]: DEBUG nova.network.neutron [req-fcc83b9f-baab-49a7-8c5c-47c909ddf675 req-8590b1c6-8687-4645-a9e1-cfd17089fa60 service nova] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Refreshing network info cache for port d6ee81d1-3abc-4d5e-a8ca-658407cbd553 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1707.664023] env[62510]: DEBUG oslo_vmware.api [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Task: {'id': task-1768993, 'name': ReconfigVM_Task, 'duration_secs': 0.421969} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1707.664435] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Reconfigured VM instance instance-00000048 to attach disk [datastore1] 1d644c4f-1fd4-4251-aeef-5777d3f4b94c/1d644c4f-1fd4-4251-aeef-5777d3f4b94c.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1707.665356] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8ee546e8-6549-477c-8ea5-79dbb864cc07 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.676132] env[62510]: DEBUG oslo_vmware.api [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Waiting for the task: (returnval){ [ 1707.676132] env[62510]: value = "task-1768994" [ 1707.676132] env[62510]: _type = "Task" [ 1707.676132] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1707.689482] env[62510]: DEBUG oslo_vmware.api [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Task: {'id': task-1768994, 'name': Rename_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.755660] env[62510]: DEBUG nova.scheduler.client.report [None req-a014305d-2438-47a2-83ea-eca5b8202f1c tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1708.188569] env[62510]: DEBUG oslo_vmware.api [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Task: {'id': task-1768994, 'name': Rename_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.194559] env[62510]: DEBUG nova.network.neutron [req-fcc83b9f-baab-49a7-8c5c-47c909ddf675 req-8590b1c6-8687-4645-a9e1-cfd17089fa60 service nova] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Updated VIF entry in instance network info cache for port d6ee81d1-3abc-4d5e-a8ca-658407cbd553. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1708.194947] env[62510]: DEBUG nova.network.neutron [req-fcc83b9f-baab-49a7-8c5c-47c909ddf675 req-8590b1c6-8687-4645-a9e1-cfd17089fa60 service nova] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Updating instance_info_cache with network_info: [{"id": "d6ee81d1-3abc-4d5e-a8ca-658407cbd553", "address": "fa:16:3e:45:ab:75", "network": {"id": "3958d418-1b64-4598-975c-02b13c976ce5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1692593298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.217", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3109fa7889c64dfda2117d4cd58aa528", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ba56987-9dc3-4c76-a4e2-942b05355bdb", "external-id": "nsx-vlan-transportzone-698", "segmentation_id": 698, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6ee81d1-3a", "ovs_interfaceid": "d6ee81d1-3abc-4d5e-a8ca-658407cbd553", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1708.262026] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a014305d-2438-47a2-83ea-eca5b8202f1c tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.044s {{(pid=62510) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1708.265244] env[62510]: DEBUG nova.compute.manager [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1708.270647] env[62510]: DEBUG oslo_concurrency.lockutils [None req-66791635-44cf-46b7-8794-6454f0561e0d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.706s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1708.270647] env[62510]: DEBUG nova.objects.instance [None req-66791635-44cf-46b7-8794-6454f0561e0d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lazy-loading 'resources' on Instance uuid 0082eb97-26e9-4196-b8e3-63460d32dd19 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1708.342013] env[62510]: INFO nova.scheduler.client.report [None req-a014305d-2438-47a2-83ea-eca5b8202f1c tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Deleted allocations for instance 8bbafd7f-cdd1-4246-a509-2f97a6f78497 [ 1708.689762] env[62510]: DEBUG oslo_vmware.api [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Task: {'id': task-1768994, 'name': Rename_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.698581] env[62510]: DEBUG oslo_concurrency.lockutils [req-fcc83b9f-baab-49a7-8c5c-47c909ddf675 req-8590b1c6-8687-4645-a9e1-cfd17089fa60 service nova] Releasing lock "refresh_cache-83fa0d32-18ee-401d-af0b-a0adb538e5f4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1708.838723] env[62510]: DEBUG nova.network.neutron [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Successfully updated port: 300bfb1b-5d55-410a-b114-cb4043425263 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1708.852700] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a014305d-2438-47a2-83ea-eca5b8202f1c tempest-VolumesAssistedSnapshotsTest-6475889 tempest-VolumesAssistedSnapshotsTest-6475889-project-member] Lock "8bbafd7f-cdd1-4246-a509-2f97a6f78497" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.140s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1709.192379] env[62510]: DEBUG oslo_vmware.api [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Task: {'id': task-1768994, 'name': Rename_Task, 'duration_secs': 1.194622} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1709.195909] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1709.196702] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-505506f7-47a1-4a60-af6a-435733bcf8e4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.205660] env[62510]: DEBUG oslo_vmware.api [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Waiting for the task: (returnval){ [ 1709.205660] env[62510]: value = "task-1768995" [ 1709.205660] env[62510]: _type = "Task" [ 1709.205660] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1709.211209] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3011b072-f678-4861-aa05-b9b4618ae270 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.221550] env[62510]: DEBUG oslo_vmware.api [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Task: {'id': task-1768995, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.226607] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86305f40-7e3e-41df-8183-53466d1ea27d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.262048] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-759140e4-8d74-444a-bb51-bd0ec0e68aa5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.270544] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33b00180-fe6a-47ea-8500-5247a11419a0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.285304] env[62510]: DEBUG nova.compute.provider_tree [None req-66791635-44cf-46b7-8794-6454f0561e0d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1709.343845] env[62510]: DEBUG oslo_concurrency.lockutils [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "refresh_cache-b7ffe11f-2f63-419b-9ad8-0a89a05d201c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1709.343845] env[62510]: DEBUG oslo_concurrency.lockutils [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 
tempest-ServersTestJSON-938961669-project-member] Acquired lock "refresh_cache-b7ffe11f-2f63-419b-9ad8-0a89a05d201c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1709.343845] env[62510]: DEBUG nova.network.neutron [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1709.413506] env[62510]: DEBUG nova.compute.manager [req-cf0d47f7-c50f-48bd-9ed8-d0e27df9b6e4 req-3933489d-2936-4a50-9fc1-1afd3567555b service nova] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Received event network-vif-plugged-300bfb1b-5d55-410a-b114-cb4043425263 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1709.413506] env[62510]: DEBUG oslo_concurrency.lockutils [req-cf0d47f7-c50f-48bd-9ed8-d0e27df9b6e4 req-3933489d-2936-4a50-9fc1-1afd3567555b service nova] Acquiring lock "b7ffe11f-2f63-419b-9ad8-0a89a05d201c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1709.413751] env[62510]: DEBUG oslo_concurrency.lockutils [req-cf0d47f7-c50f-48bd-9ed8-d0e27df9b6e4 req-3933489d-2936-4a50-9fc1-1afd3567555b service nova] Lock "b7ffe11f-2f63-419b-9ad8-0a89a05d201c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1709.413793] env[62510]: DEBUG oslo_concurrency.lockutils [req-cf0d47f7-c50f-48bd-9ed8-d0e27df9b6e4 req-3933489d-2936-4a50-9fc1-1afd3567555b service nova] Lock "b7ffe11f-2f63-419b-9ad8-0a89a05d201c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1709.413930] env[62510]: DEBUG nova.compute.manager [req-cf0d47f7-c50f-48bd-9ed8-d0e27df9b6e4 req-3933489d-2936-4a50-9fc1-1afd3567555b service nova] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] No waiting events found dispatching network-vif-plugged-300bfb1b-5d55-410a-b114-cb4043425263 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1709.414468] env[62510]: WARNING nova.compute.manager [req-cf0d47f7-c50f-48bd-9ed8-d0e27df9b6e4 req-3933489d-2936-4a50-9fc1-1afd3567555b service nova] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Received unexpected event network-vif-plugged-300bfb1b-5d55-410a-b114-cb4043425263 for instance with vm_state building and task_state spawning. [ 1709.414689] env[62510]: DEBUG nova.compute.manager [req-cf0d47f7-c50f-48bd-9ed8-d0e27df9b6e4 req-3933489d-2936-4a50-9fc1-1afd3567555b service nova] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Received event network-changed-300bfb1b-5d55-410a-b114-cb4043425263 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1709.414878] env[62510]: DEBUG nova.compute.manager [req-cf0d47f7-c50f-48bd-9ed8-d0e27df9b6e4 req-3933489d-2936-4a50-9fc1-1afd3567555b service nova] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Refreshing instance network info cache due to event network-changed-300bfb1b-5d55-410a-b114-cb4043425263. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1709.415088] env[62510]: DEBUG oslo_concurrency.lockutils [req-cf0d47f7-c50f-48bd-9ed8-d0e27df9b6e4 req-3933489d-2936-4a50-9fc1-1afd3567555b service nova] Acquiring lock "refresh_cache-b7ffe11f-2f63-419b-9ad8-0a89a05d201c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1709.720089] env[62510]: DEBUG oslo_vmware.api [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Task: {'id': task-1768995, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.789412] env[62510]: DEBUG nova.virt.hardware [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='f598a67200ac88069b451f5057abb42b',container_format='bare',created_at=2024-12-11T19:37:58Z,direct_url=,disk_format='vmdk',id=b54fc7f8-0408-41bd-abcd-6c673fa40237,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1530971102-shelved',owner='3109fa7889c64dfda2117d4cd58aa528',properties=ImageMetaProps,protected=,size=31668736,status='active',tags=,updated_at=2024-12-11T19:38:13Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1709.789412] env[62510]: DEBUG nova.virt.hardware [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1709.789412] env[62510]: DEBUG nova.virt.hardware [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1709.789660] env[62510]: DEBUG nova.virt.hardware [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1709.790337] env[62510]: DEBUG nova.virt.hardware [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1709.790337] env[62510]: DEBUG nova.virt.hardware [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1709.790556] env[62510]: DEBUG 
nova.virt.hardware [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1709.790645] env[62510]: DEBUG nova.virt.hardware [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1709.791314] env[62510]: DEBUG nova.virt.hardware [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1709.791314] env[62510]: DEBUG nova.virt.hardware [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1709.791615] env[62510]: DEBUG nova.virt.hardware [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1709.792595] env[62510]: DEBUG nova.scheduler.client.report [None req-66791635-44cf-46b7-8794-6454f0561e0d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1709.798333] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2835c6fb-71c2-49c8-bdf6-10a516622cfa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.808614] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0df2a033-5673-40c7-996c-93738d42dac7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.815604] env[62510]: DEBUG nova.virt.hardware [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1709.815897] env[62510]: DEBUG nova.virt.hardware [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1709.816068] env[62510]: DEBUG nova.virt.hardware [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1709.816249] env[62510]: DEBUG nova.virt.hardware [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1709.816441] env[62510]: DEBUG nova.virt.hardware [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1709.816673] env[62510]: DEBUG nova.virt.hardware [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1709.817032] env[62510]: DEBUG nova.virt.hardware [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1709.817163] env[62510]: DEBUG nova.virt.hardware [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1709.817370] env[62510]: DEBUG nova.virt.hardware [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1709.817571] env[62510]: DEBUG nova.virt.hardware [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] 
Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1709.817814] env[62510]: DEBUG nova.virt.hardware [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1709.820402] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3ff6ae4-2a55-4f63-8446-39015ce4676f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.838596] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:ab:75', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6ba56987-9dc3-4c76-a4e2-942b05355bdb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd6ee81d1-3abc-4d5e-a8ca-658407cbd553', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1709.846889] env[62510]: DEBUG oslo.service.loopingcall [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1709.849411] env[62510]: DEBUG oslo_vmware.rw_handles [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529a3b75-30c4-0262-2583-5783611b8fba/disk-0.vmdk. 
{{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1709.854781] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1709.855795] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6bec808-9bb2-4fc7-9389-5a3637527ddf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.858520] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-454b2648-3444-40bb-b97c-bd0b67f5d2b0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.876153] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2c57976-e58b-4962-95e1-6860385b85e5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.887336] env[62510]: DEBUG oslo_vmware.rw_handles [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529a3b75-30c4-0262-2583-5783611b8fba/disk-0.vmdk is in state: ready. {{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1709.887336] env[62510]: ERROR oslo_vmware.rw_handles [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529a3b75-30c4-0262-2583-5783611b8fba/disk-0.vmdk due to incomplete transfer. [ 1709.896852] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-419edfeb-e1b5-4066-b7d5-937b18e910fb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.898902] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1709.898902] env[62510]: value = "task-1768996" [ 1709.898902] env[62510]: _type = "Task" [ 1709.898902] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1709.910901] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768996, 'name': CreateVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.920962] env[62510]: DEBUG oslo_vmware.rw_handles [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/529a3b75-30c4-0262-2583-5783611b8fba/disk-0.vmdk. 
{{(pid=62510) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1709.921141] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Uploaded image 37ce35c0-4c91-45fb-b27b-04201e3f0d27 to the Glance image server {{(pid=62510) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1709.922962] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Destroying the VM {{(pid=62510) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1709.923280] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-5c40279d-283d-4b8f-8e47-d9cfb0bf76bf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.932261] env[62510]: DEBUG oslo_vmware.api [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for the task: (returnval){ [ 1709.932261] env[62510]: value = "task-1768997" [ 1709.932261] env[62510]: _type = "Task" [ 1709.932261] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1709.938197] env[62510]: DEBUG nova.network.neutron [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1709.945712] env[62510]: DEBUG oslo_vmware.api [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768997, 'name': Destroy_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.141243] env[62510]: DEBUG nova.network.neutron [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Updating instance_info_cache with network_info: [{"id": "300bfb1b-5d55-410a-b114-cb4043425263", "address": "fa:16:3e:f6:64:b8", "network": {"id": "22bd7136-e6e5-445f-8cd0-6cfe0341410c", "bridge": "br-int", "label": "tempest-ServersTestJSON-2034430291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "94a46473611d4b22be7c66c909d1b348", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap300bfb1b-5d", "ovs_interfaceid": "300bfb1b-5d55-410a-b114-cb4043425263", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1710.219446] env[62510]: DEBUG oslo_vmware.api [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Task: {'id': task-1768995, 'name': PowerOnVM_Task, 'duration_secs': 0.648819} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1710.219861] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1710.220118] env[62510]: INFO nova.compute.manager [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Took 9.11 seconds to spawn the instance on the hypervisor. 
[ 1710.220305] env[62510]: DEBUG nova.compute.manager [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1710.221115] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85da0b13-d9b3-4d56-a2c4-b778e81b09d2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.302599] env[62510]: DEBUG oslo_concurrency.lockutils [None req-66791635-44cf-46b7-8794-6454f0561e0d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.035s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1710.305790] env[62510]: DEBUG oslo_concurrency.lockutils [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 39.606s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1710.305790] env[62510]: DEBUG nova.objects.instance [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62510) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1710.337785] env[62510]: INFO nova.scheduler.client.report [None req-66791635-44cf-46b7-8794-6454f0561e0d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Deleted allocations for instance 0082eb97-26e9-4196-b8e3-63460d32dd19 [ 1710.411114] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768996, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.444234] env[62510]: DEBUG oslo_vmware.api [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768997, 'name': Destroy_Task} progress is 33%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.647676] env[62510]: DEBUG oslo_concurrency.lockutils [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Releasing lock "refresh_cache-b7ffe11f-2f63-419b-9ad8-0a89a05d201c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1710.648155] env[62510]: DEBUG nova.compute.manager [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Instance network_info: |[{"id": "300bfb1b-5d55-410a-b114-cb4043425263", "address": "fa:16:3e:f6:64:b8", "network": {"id": "22bd7136-e6e5-445f-8cd0-6cfe0341410c", "bridge": "br-int", "label": "tempest-ServersTestJSON-2034430291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "94a46473611d4b22be7c66c909d1b348", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap300bfb1b-5d", "ovs_interfaceid": "300bfb1b-5d55-410a-b114-cb4043425263", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1710.648565] env[62510]: DEBUG oslo_concurrency.lockutils [req-cf0d47f7-c50f-48bd-9ed8-d0e27df9b6e4 req-3933489d-2936-4a50-9fc1-1afd3567555b service nova] Acquired lock "refresh_cache-b7ffe11f-2f63-419b-9ad8-0a89a05d201c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1710.648843] env[62510]: DEBUG nova.network.neutron [req-cf0d47f7-c50f-48bd-9ed8-d0e27df9b6e4 req-3933489d-2936-4a50-9fc1-1afd3567555b service nova] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Refreshing network info cache for port 300bfb1b-5d55-410a-b114-cb4043425263 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1710.650352] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f6:64:b8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89ef02af-c508-432f-ae29-3a219701d584', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '300bfb1b-5d55-410a-b114-cb4043425263', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1710.659789] env[62510]: DEBUG oslo.service.loopingcall [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1710.660972] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1710.661324] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eb230a6e-acfa-4000-b97d-e7b79629ba27 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.684647] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1710.684647] env[62510]: value = "task-1768998" [ 1710.684647] env[62510]: _type = "Task" [ 1710.684647] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1710.693569] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768998, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.743423] env[62510]: INFO nova.compute.manager [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Took 53.50 seconds to build instance. [ 1710.848669] env[62510]: DEBUG oslo_concurrency.lockutils [None req-66791635-44cf-46b7-8794-6454f0561e0d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "0082eb97-26e9-4196-b8e3-63460d32dd19" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.865s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1710.921334] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768996, 'name': CreateVM_Task, 'duration_secs': 0.687887} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1710.923536] env[62510]: DEBUG oslo_vmware.rw_handles [None req-a9fa7169-0d2e-47de-8739-7dc627eb07d1 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528b0a12-787b-984a-5b85-6467e01a70ce/disk-0.vmdk. 
{{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1710.923762] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1710.925707] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82d2cdfd-8e06-4519-9049-570c5607d912 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.928773] env[62510]: DEBUG oslo_concurrency.lockutils [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b54fc7f8-0408-41bd-abcd-6c673fa40237" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1710.928946] env[62510]: DEBUG oslo_concurrency.lockutils [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b54fc7f8-0408-41bd-abcd-6c673fa40237" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1710.929654] env[62510]: DEBUG oslo_concurrency.lockutils [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b54fc7f8-0408-41bd-abcd-6c673fa40237" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1710.929953] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0fc9138e-544d-4120-96dc-d25cb2da0880 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.939711] env[62510]: DEBUG oslo_vmware.rw_handles [None req-a9fa7169-0d2e-47de-8739-7dc627eb07d1 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528b0a12-787b-984a-5b85-6467e01a70ce/disk-0.vmdk is in state: ready. {{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1710.939711] env[62510]: ERROR oslo_vmware.rw_handles [None req-a9fa7169-0d2e-47de-8739-7dc627eb07d1 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528b0a12-787b-984a-5b85-6467e01a70ce/disk-0.vmdk due to incomplete transfer. [ 1710.939911] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1710.939911] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52f9045d-91c8-c7f2-4dd3-d63fdf506ca6" [ 1710.939911] env[62510]: _type = "Task" [ 1710.939911] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1710.940518] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-61a974c9-043a-4087-a272-8ad54b67a957 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.948621] env[62510]: DEBUG oslo_vmware.api [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768997, 'name': Destroy_Task, 'duration_secs': 0.966627} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1710.949262] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Destroyed the VM [ 1710.949530] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Deleting Snapshot of the VM instance {{(pid=62510) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1710.949836] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-11a200e5-07af-4d9f-8c98-43dfa8dd16ab {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.954508] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52f9045d-91c8-c7f2-4dd3-d63fdf506ca6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.959747] env[62510]: DEBUG oslo_vmware.api [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for the task: (returnval){ [ 1710.959747] env[62510]: value = "task-1768999" [ 1710.959747] env[62510]: _type = "Task" [ 1710.959747] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1710.961250] env[62510]: DEBUG oslo_vmware.rw_handles [None req-a9fa7169-0d2e-47de-8739-7dc627eb07d1 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528b0a12-787b-984a-5b85-6467e01a70ce/disk-0.vmdk. 
{{(pid=62510) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1710.961563] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-a9fa7169-0d2e-47de-8739-7dc627eb07d1 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Uploaded image f6223ac6-801a-4cf1-b252-449e54e92fe4 to the Glance image server {{(pid=62510) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1710.963938] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9fa7169-0d2e-47de-8739-7dc627eb07d1 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Destroying the VM {{(pid=62510) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1710.967516] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e3c591cc-5863-4ccb-b5d2-e49b06173761 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.975011] env[62510]: DEBUG oslo_vmware.api [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768999, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.976421] env[62510]: DEBUG oslo_vmware.api [None req-a9fa7169-0d2e-47de-8739-7dc627eb07d1 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1710.976421] env[62510]: value = "task-1769000" [ 1710.976421] env[62510]: _type = "Task" [ 1710.976421] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1710.984947] env[62510]: DEBUG oslo_vmware.api [None req-a9fa7169-0d2e-47de-8739-7dc627eb07d1 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769000, 'name': Destroy_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1711.199485] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768998, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1711.245463] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8f429914-1354-4d50-9676-766c9fa94fc7 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Lock "1d644c4f-1fd4-4251-aeef-5777d3f4b94c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.020s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1711.322009] env[62510]: DEBUG oslo_concurrency.lockutils [None req-772f9151-fec2-460a-ac63-b4c77cdcb078 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1711.322009] env[62510]: DEBUG oslo_concurrency.lockutils [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.230s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1711.323108] env[62510]: INFO nova.compute.claims [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1711.330211] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9af1c1a2-1ecc-440c-b170-c5cfb6a171e0 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Acquiring lock "1d644c4f-1fd4-4251-aeef-5777d3f4b94c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1711.330211] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9af1c1a2-1ecc-440c-b170-c5cfb6a171e0 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Lock "1d644c4f-1fd4-4251-aeef-5777d3f4b94c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1711.330211] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9af1c1a2-1ecc-440c-b170-c5cfb6a171e0 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Acquiring lock "1d644c4f-1fd4-4251-aeef-5777d3f4b94c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1711.330211] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9af1c1a2-1ecc-440c-b170-c5cfb6a171e0 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Lock "1d644c4f-1fd4-4251-aeef-5777d3f4b94c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1711.330211] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9af1c1a2-1ecc-440c-b170-c5cfb6a171e0 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Lock "1d644c4f-1fd4-4251-aeef-5777d3f4b94c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1711.331292] env[62510]: INFO nova.compute.manager [None req-9af1c1a2-1ecc-440c-b170-c5cfb6a171e0 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Terminating instance [ 1711.385863] env[62510]: DEBUG nova.network.neutron [req-cf0d47f7-c50f-48bd-9ed8-d0e27df9b6e4 req-3933489d-2936-4a50-9fc1-1afd3567555b service nova] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Updated VIF entry in instance network info cache for port 300bfb1b-5d55-410a-b114-cb4043425263. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1711.386247] env[62510]: DEBUG nova.network.neutron [req-cf0d47f7-c50f-48bd-9ed8-d0e27df9b6e4 req-3933489d-2936-4a50-9fc1-1afd3567555b service nova] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Updating instance_info_cache with network_info: [{"id": "300bfb1b-5d55-410a-b114-cb4043425263", "address": "fa:16:3e:f6:64:b8", "network": {"id": "22bd7136-e6e5-445f-8cd0-6cfe0341410c", "bridge": "br-int", "label": "tempest-ServersTestJSON-2034430291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "94a46473611d4b22be7c66c909d1b348", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap300bfb1b-5d", "ovs_interfaceid": "300bfb1b-5d55-410a-b114-cb4043425263", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1711.454293] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52f9045d-91c8-c7f2-4dd3-d63fdf506ca6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1711.472493] env[62510]: DEBUG oslo_vmware.api [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768999, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1711.487213] env[62510]: DEBUG oslo_vmware.api [None req-a9fa7169-0d2e-47de-8739-7dc627eb07d1 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769000, 'name': Destroy_Task} progress is 33%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1711.698618] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768998, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1711.836297] env[62510]: DEBUG nova.compute.manager [None req-9af1c1a2-1ecc-440c-b170-c5cfb6a171e0 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1711.836642] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9af1c1a2-1ecc-440c-b170-c5cfb6a171e0 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1711.837656] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b61a8999-6e3d-49bc-966b-0434422d5f38 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.847921] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-9af1c1a2-1ecc-440c-b170-c5cfb6a171e0 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1711.848344] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-66c9d465-519f-403d-987d-786d676bb817 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.858585] env[62510]: DEBUG oslo_vmware.api [None req-9af1c1a2-1ecc-440c-b170-c5cfb6a171e0 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Waiting for the task: (returnval){ [ 1711.858585] env[62510]: value = "task-1769001" [ 1711.858585] env[62510]: _type = "Task" [ 1711.858585] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1711.869859] env[62510]: DEBUG oslo_vmware.api [None req-9af1c1a2-1ecc-440c-b170-c5cfb6a171e0 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Task: {'id': task-1769001, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1711.889392] env[62510]: DEBUG oslo_concurrency.lockutils [req-cf0d47f7-c50f-48bd-9ed8-d0e27df9b6e4 req-3933489d-2936-4a50-9fc1-1afd3567555b service nova] Releasing lock "refresh_cache-b7ffe11f-2f63-419b-9ad8-0a89a05d201c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1711.957687] env[62510]: DEBUG oslo_concurrency.lockutils [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b54fc7f8-0408-41bd-abcd-6c673fa40237" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1711.958188] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Processing image b54fc7f8-0408-41bd-abcd-6c673fa40237 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1711.958392] env[62510]: DEBUG oslo_concurrency.lockutils [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b54fc7f8-0408-41bd-abcd-6c673fa40237/b54fc7f8-0408-41bd-abcd-6c673fa40237.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1711.958621] env[62510]: DEBUG oslo_concurrency.lockutils [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b54fc7f8-0408-41bd-abcd-6c673fa40237/b54fc7f8-0408-41bd-abcd-6c673fa40237.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1711.961061] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1711.961061] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-14d095fc-cfc1-4b00-b865-193e5a68cd8a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.972020] env[62510]: DEBUG oslo_vmware.api [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768999, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1711.987371] env[62510]: DEBUG oslo_vmware.api [None req-a9fa7169-0d2e-47de-8739-7dc627eb07d1 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769000, 'name': Destroy_Task, 'duration_secs': 0.854278} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1711.987371] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-a9fa7169-0d2e-47de-8739-7dc627eb07d1 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Destroyed the VM [ 1711.987371] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a9fa7169-0d2e-47de-8739-7dc627eb07d1 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Deleting Snapshot of the VM instance {{(pid=62510) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1711.987561] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-0618cfd4-c91d-4863-bba6-92ff02b661fa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.995926] env[62510]: DEBUG oslo_vmware.api [None req-a9fa7169-0d2e-47de-8739-7dc627eb07d1 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1711.995926] env[62510]: value = "task-1769002" [ 1711.995926] env[62510]: _type = "Task" [ 1711.995926] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1712.010068] env[62510]: DEBUG oslo_vmware.api [None req-a9fa7169-0d2e-47de-8739-7dc627eb07d1 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769002, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.197103] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768998, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.376117] env[62510]: DEBUG oslo_vmware.api [None req-9af1c1a2-1ecc-440c-b170-c5cfb6a171e0 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Task: {'id': task-1769001, 'name': PowerOffVM_Task, 'duration_secs': 0.234252} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1712.378972] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-9af1c1a2-1ecc-440c-b170-c5cfb6a171e0 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1712.379199] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9af1c1a2-1ecc-440c-b170-c5cfb6a171e0 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1712.383984] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6a1ac862-471b-4cda-8fd2-2d268cd2da23 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.389964] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1712.390196] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1712.391793] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8992658d-d003-4620-8eeb-cd3921dd25c4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.400838] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1712.400838] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]522d68a5-ccb4-870d-54f7-fc5f70241897" [ 1712.400838] env[62510]: _type = "Task" [ 1712.400838] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1712.412859] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]522d68a5-ccb4-870d-54f7-fc5f70241897, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.480028] env[62510]: DEBUG oslo_vmware.api [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1768999, 'name': RemoveSnapshot_Task, 'duration_secs': 1.097857} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1712.480028] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Deleted Snapshot of the VM instance {{(pid=62510) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1712.480028] env[62510]: DEBUG nova.compute.manager [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1712.480028] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdbe116b-84b4-437e-8440-bf0cf9af38b5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.510153] env[62510]: DEBUG oslo_vmware.api [None req-a9fa7169-0d2e-47de-8739-7dc627eb07d1 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769002, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.698024] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9af1c1a2-1ecc-440c-b170-c5cfb6a171e0 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1712.698024] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9af1c1a2-1ecc-440c-b170-c5cfb6a171e0 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1712.698164] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-9af1c1a2-1ecc-440c-b170-c5cfb6a171e0 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Deleting the datastore file [datastore1] 1d644c4f-1fd4-4251-aeef-5777d3f4b94c {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1712.698428] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e605baf6-663b-493a-872e-6c52f5cc3802 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.704271] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1768998, 'name': CreateVM_Task, 'duration_secs': 1.518654} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1712.706321] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1712.707784] env[62510]: DEBUG oslo_concurrency.lockutils [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1712.707784] env[62510]: DEBUG oslo_concurrency.lockutils [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1712.707784] env[62510]: DEBUG oslo_concurrency.lockutils [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1712.707958] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1229f27-734e-40f0-932d-125412f25ae0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.712373] env[62510]: DEBUG oslo_vmware.api [None req-9af1c1a2-1ecc-440c-b170-c5cfb6a171e0 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Waiting for the task: (returnval){ [ 1712.712373] env[62510]: value = "task-1769004" [ 1712.712373] env[62510]: _type = "Task" [ 1712.712373] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1712.716358] env[62510]: DEBUG oslo_vmware.api [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1712.716358] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]528c0fa6-1a33-b425-96fb-bf6cb376048e" [ 1712.716358] env[62510]: _type = "Task" [ 1712.716358] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1712.731074] env[62510]: DEBUG oslo_vmware.api [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]528c0fa6-1a33-b425-96fb-bf6cb376048e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.736997] env[62510]: DEBUG oslo_vmware.api [None req-9af1c1a2-1ecc-440c-b170-c5cfb6a171e0 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Task: {'id': task-1769004, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.801049] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fabe39d5-2404-4097-a8e7-9f89b9356039 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.810694] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc189e51-8c0a-4b59-9e87-fa7fdf5b1195 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.848381] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8c14176-17fd-40f2-8ba1-5f376658e309 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.858846] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e88f52d-2284-4c6f-b0c0-8616281cd0c9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.871720] env[62510]: DEBUG nova.compute.provider_tree [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1712.883768] env[62510]: DEBUG oslo_concurrency.lockutils [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "774ea198-c933-449a-8380-2e4cc9327389" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1712.883995] env[62510]: DEBUG oslo_concurrency.lockutils [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "774ea198-c933-449a-8380-2e4cc9327389" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1712.913381] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Preparing fetch location {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1712.913381] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Fetch image to [datastore1] OSTACK_IMG_484765bc-1828-4845-8ce5-3c9a93690de8/OSTACK_IMG_484765bc-1828-4845-8ce5-3c9a93690de8.vmdk {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1712.913552] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 
tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Downloading stream optimized image b54fc7f8-0408-41bd-abcd-6c673fa40237 to [datastore1] OSTACK_IMG_484765bc-1828-4845-8ce5-3c9a93690de8/OSTACK_IMG_484765bc-1828-4845-8ce5-3c9a93690de8.vmdk on the data store datastore1 as vApp {{(pid=62510) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1712.913695] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Downloading image file data b54fc7f8-0408-41bd-abcd-6c673fa40237 to the ESX as VM named 'OSTACK_IMG_484765bc-1828-4845-8ce5-3c9a93690de8' {{(pid=62510) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1713.001746] env[62510]: INFO nova.compute.manager [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Shelve offloading [ 1713.005217] env[62510]: DEBUG oslo_vmware.rw_handles [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1713.005217] env[62510]: value = "resgroup-9" [ 1713.005217] env[62510]: _type = "ResourcePool" [ 1713.005217] env[62510]: }. {{(pid=62510) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1713.005891] env[62510]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-6ae0d9f6-df38-4972-9e64-8222d7394558 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.029341] env[62510]: DEBUG oslo_vmware.api [None req-a9fa7169-0d2e-47de-8739-7dc627eb07d1 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769002, 'name': RemoveSnapshot_Task, 'duration_secs': 0.921818} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1713.030531] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a9fa7169-0d2e-47de-8739-7dc627eb07d1 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Deleted Snapshot of the VM instance {{(pid=62510) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1713.030764] env[62510]: INFO nova.compute.manager [None req-a9fa7169-0d2e-47de-8739-7dc627eb07d1 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Took 15.88 seconds to snapshot the instance on the hypervisor. 
[ 1713.036453] env[62510]: DEBUG oslo_vmware.rw_handles [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lease: (returnval){ [ 1713.036453] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]522b155b-45f4-fcbc-b9fa-e6b12cfdb9ab" [ 1713.036453] env[62510]: _type = "HttpNfcLease" [ 1713.036453] env[62510]: } obtained for vApp import into resource pool (val){ [ 1713.036453] env[62510]: value = "resgroup-9" [ 1713.036453] env[62510]: _type = "ResourcePool" [ 1713.036453] env[62510]: }. {{(pid=62510) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1713.038693] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the lease: (returnval){ [ 1713.038693] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]522b155b-45f4-fcbc-b9fa-e6b12cfdb9ab" [ 1713.038693] env[62510]: _type = "HttpNfcLease" [ 1713.038693] env[62510]: } to be ready. {{(pid=62510) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1713.044607] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1713.044607] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]522b155b-45f4-fcbc-b9fa-e6b12cfdb9ab" [ 1713.044607] env[62510]: _type = "HttpNfcLease" [ 1713.044607] env[62510]: } is initializing. {{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1713.231259] env[62510]: DEBUG oslo_vmware.api [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]528c0fa6-1a33-b425-96fb-bf6cb376048e, 'name': SearchDatastore_Task, 'duration_secs': 0.025155} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1713.233891] env[62510]: DEBUG oslo_concurrency.lockutils [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1713.234154] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1713.234431] env[62510]: DEBUG oslo_concurrency.lockutils [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1713.234595] env[62510]: DEBUG oslo_concurrency.lockutils [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1713.234768] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1713.235052] env[62510]: DEBUG oslo_vmware.api [None req-9af1c1a2-1ecc-440c-b170-c5cfb6a171e0 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Task: {'id': task-1769004, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167081} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1713.235254] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fd76d8fa-9341-400b-ac21-05124725c50c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.237028] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-9af1c1a2-1ecc-440c-b170-c5cfb6a171e0 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1713.237220] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9af1c1a2-1ecc-440c-b170-c5cfb6a171e0 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1713.237478] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9af1c1a2-1ecc-440c-b170-c5cfb6a171e0 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1713.237744] env[62510]: INFO nova.compute.manager [None req-9af1c1a2-1ecc-440c-b170-c5cfb6a171e0 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Took 1.40 seconds to destroy the instance on the hypervisor. [ 1713.238007] env[62510]: DEBUG oslo.service.loopingcall [None req-9af1c1a2-1ecc-440c-b170-c5cfb6a171e0 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1713.238257] env[62510]: DEBUG nova.compute.manager [-] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1713.238399] env[62510]: DEBUG nova.network.neutron [-] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1713.248174] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1713.248174] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1713.248497] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63f6588e-c5b6-4b3c-b01f-972b7d7927f9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.254311] env[62510]: DEBUG oslo_vmware.api [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1713.254311] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52c7dc0c-2537-2a52-bde7-a9f3e73e751c" [ 1713.254311] env[62510]: _type = "Task" [ 1713.254311] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1713.262990] env[62510]: DEBUG oslo_vmware.api [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52c7dc0c-2537-2a52-bde7-a9f3e73e751c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1713.375436] env[62510]: DEBUG nova.scheduler.client.report [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1713.386972] env[62510]: DEBUG nova.compute.manager [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1713.536094] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1713.536094] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3d7104a9-042b-477b-ba8c-2fe7a0ead942 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.544225] env[62510]: DEBUG oslo_vmware.api [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for the task: (returnval){ [ 1713.544225] env[62510]: value = "task-1769006" [ 1713.544225] env[62510]: _type = "Task" [ 1713.544225] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1713.553488] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1713.553488] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]522b155b-45f4-fcbc-b9fa-e6b12cfdb9ab" [ 1713.553488] env[62510]: _type = "HttpNfcLease" [ 1713.553488] env[62510]: } is initializing. {{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1713.557680] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] VM already powered off {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1713.557913] env[62510]: DEBUG nova.compute.manager [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1713.558995] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28884df4-5dd4-4c92-bff8-a9d3f5f126f7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.566849] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Acquiring lock "refresh_cache-e7daad63-c802-4a86-bead-7e849064ed61" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1713.567140] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Acquired lock "refresh_cache-e7daad63-c802-4a86-bead-7e849064ed61" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1713.567255] env[62510]: DEBUG nova.network.neutron [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1713.604180] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquiring lock "144052ab-e3e7-401f-9edb-d8088780e468" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1713.604516] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "144052ab-e3e7-401f-9edb-d8088780e468" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1713.671459] env[62510]: 
DEBUG nova.compute.manager [req-9b9b976e-dcfe-4aa9-bfce-8053909ed648 req-109d9898-0724-4f96-950e-ce848a7e6dfa service nova] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Received event network-vif-deleted-c065c584-69ab-4d52-8de2-906db2d8c438 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1713.671665] env[62510]: INFO nova.compute.manager [req-9b9b976e-dcfe-4aa9-bfce-8053909ed648 req-109d9898-0724-4f96-950e-ce848a7e6dfa service nova] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Neutron deleted interface c065c584-69ab-4d52-8de2-906db2d8c438; detaching it from the instance and deleting it from the info cache [ 1713.671848] env[62510]: DEBUG nova.network.neutron [req-9b9b976e-dcfe-4aa9-bfce-8053909ed648 req-109d9898-0724-4f96-950e-ce848a7e6dfa service nova] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1713.766999] env[62510]: DEBUG oslo_vmware.api [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52c7dc0c-2537-2a52-bde7-a9f3e73e751c, 'name': SearchDatastore_Task, 'duration_secs': 0.010069} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1713.767901] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb4fc88e-a912-45ab-bf55-fe8970607aa7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.774630] env[62510]: DEBUG oslo_vmware.api [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1713.774630] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]524479fa-d5e0-dde2-b0be-447da242caf2" [ 1713.774630] env[62510]: _type = "Task" [ 1713.774630] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1713.783157] env[62510]: DEBUG oslo_vmware.api [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]524479fa-d5e0-dde2-b0be-447da242caf2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1713.880671] env[62510]: DEBUG oslo_concurrency.lockutils [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.560s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1713.881217] env[62510]: DEBUG nova.compute.manager [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1713.884163] env[62510]: DEBUG oslo_concurrency.lockutils [None req-948095a4-a52a-404e-bfaf-f29508e7b9a2 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.940s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1713.884380] env[62510]: DEBUG nova.objects.instance [None req-948095a4-a52a-404e-bfaf-f29508e7b9a2 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lazy-loading 'resources' on Instance uuid 90869287-22bd-438c-8684-56f5d43e3ca8 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1713.908254] env[62510]: DEBUG oslo_concurrency.lockutils [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1714.047250] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1714.047250] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]522b155b-45f4-fcbc-b9fa-e6b12cfdb9ab" [ 1714.047250] env[62510]: _type = "HttpNfcLease" [ 1714.047250] env[62510]: } is ready. {{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1714.047544] env[62510]: DEBUG oslo_vmware.rw_handles [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1714.047544] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]522b155b-45f4-fcbc-b9fa-e6b12cfdb9ab" [ 1714.047544] env[62510]: _type = "HttpNfcLease" [ 1714.047544] env[62510]: }. {{(pid=62510) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1714.048270] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b40f065-c411-4a1c-b6f6-c98d83460f62 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.055691] env[62510]: DEBUG oslo_vmware.rw_handles [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d9520c-5dca-6725-70da-7c007cc140f4/disk-0.vmdk from lease info. {{(pid=62510) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1714.056314] env[62510]: DEBUG oslo_vmware.rw_handles [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Creating HTTP connection to write to file with size = 31668736 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d9520c-5dca-6725-70da-7c007cc140f4/disk-0.vmdk. 
{{(pid=62510) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1714.114616] env[62510]: DEBUG nova.compute.manager [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1714.121852] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-778aa469-a577-4c70-b6fb-89c28d1b48b5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.140446] env[62510]: DEBUG nova.network.neutron [-] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1714.176257] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9ca5cf89-667a-4699-bcaa-235e509ff7d7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.192203] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c9b683f-6a9c-43df-a595-a203029ca877 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.234592] env[62510]: DEBUG nova.compute.manager [req-9b9b976e-dcfe-4aa9-bfce-8053909ed648 req-109d9898-0724-4f96-950e-ce848a7e6dfa service nova] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Detach interface failed, port_id=c065c584-69ab-4d52-8de2-906db2d8c438, reason: Instance 1d644c4f-1fd4-4251-aeef-5777d3f4b94c could not be found. {{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1714.285769] env[62510]: DEBUG oslo_vmware.api [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]524479fa-d5e0-dde2-b0be-447da242caf2, 'name': SearchDatastore_Task, 'duration_secs': 0.010805} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1714.286107] env[62510]: DEBUG oslo_concurrency.lockutils [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1714.286319] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] b7ffe11f-2f63-419b-9ad8-0a89a05d201c/b7ffe11f-2f63-419b-9ad8-0a89a05d201c.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1714.286603] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6c73f696-488b-4071-9148-eeacfb8afb8b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.297236] env[62510]: DEBUG oslo_vmware.api [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1714.297236] env[62510]: value = "task-1769007" [ 1714.297236] env[62510]: _type = "Task" [ 1714.297236] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1714.308532] env[62510]: DEBUG oslo_vmware.api [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769007, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1714.382416] env[62510]: DEBUG nova.network.neutron [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Updating instance_info_cache with network_info: [{"id": "13fb40b1-132b-407d-b6e0-eec141ae88a8", "address": "fa:16:3e:3b:f9:de", "network": {"id": "259e31c4-74f6-4d58-9f76-c7b34d594473", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1218880601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f85ce3c02964d36a77221ba8235978c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e85cbc56-fee0-41f7-bc70-64f31775ce92", "external-id": "nsx-vlan-transportzone-793", "segmentation_id": 793, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13fb40b1-13", "ovs_interfaceid": "13fb40b1-132b-407d-b6e0-eec141ae88a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1714.388577] env[62510]: DEBUG nova.compute.utils [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1714.395391] env[62510]: DEBUG nova.compute.manager [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1714.395605] env[62510]: DEBUG nova.network.neutron [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1714.457545] env[62510]: DEBUG nova.policy [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aa4c3fa8aa6141558d7eb16e0e726b96', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '68579c8354b4431e8ec51575cda77325', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1714.638085] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1714.643848] env[62510]: INFO nova.compute.manager [-] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Took 1.41 seconds to deallocate network for instance. [ 1714.811831] env[62510]: DEBUG oslo_vmware.api [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769007, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1714.813638] env[62510]: DEBUG nova.network.neutron [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Successfully created port: ac9385ca-72de-486a-9901-44c6387f7c86 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1714.888601] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Releasing lock "refresh_cache-e7daad63-c802-4a86-bead-7e849064ed61" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1714.899501] env[62510]: DEBUG nova.compute.manager [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Start building block device mappings for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1714.943589] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e63449c-b422-4a28-a7f7-9d99a5a51d44 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.960166] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18ee3203-5db5-4901-8988-0ab170e21e81 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.006200] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e051ed8-4141-495d-b0aa-fa54aeaa2ea1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.020248] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d179e4e6-a3c7-4cfa-9d51-67961d00b226 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.040363] env[62510]: DEBUG nova.compute.provider_tree [None req-948095a4-a52a-404e-bfaf-f29508e7b9a2 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1715.150724] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9af1c1a2-1ecc-440c-b170-c5cfb6a171e0 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1715.311552] env[62510]: DEBUG oslo_vmware.api [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769007, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.554142} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1715.311968] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] b7ffe11f-2f63-419b-9ad8-0a89a05d201c/b7ffe11f-2f63-419b-9ad8-0a89a05d201c.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1715.312225] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1715.312487] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-70ace6af-3458-4811-94d9-a41a2b4d0521 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.321097] env[62510]: DEBUG oslo_vmware.api [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1715.321097] env[62510]: value = "task-1769008" [ 1715.321097] env[62510]: _type = "Task" [ 1715.321097] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1715.330736] env[62510]: DEBUG oslo_vmware.api [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769008, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.370149] env[62510]: DEBUG oslo_vmware.rw_handles [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Completed reading data from the image iterator. {{(pid=62510) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1715.370498] env[62510]: DEBUG oslo_vmware.rw_handles [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d9520c-5dca-6725-70da-7c007cc140f4/disk-0.vmdk. {{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1715.371709] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-581eb70c-2dcf-47da-a5ec-78d3f9c4bc1d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.383592] env[62510]: DEBUG oslo_vmware.rw_handles [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d9520c-5dca-6725-70da-7c007cc140f4/disk-0.vmdk is in state: ready. 
{{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1715.383767] env[62510]: DEBUG oslo_vmware.rw_handles [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d9520c-5dca-6725-70da-7c007cc140f4/disk-0.vmdk. {{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1715.384050] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-17aa4098-99b8-4873-a929-8eb988aee061 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.422711] env[62510]: DEBUG nova.compute.manager [req-9d8e4f4e-06a9-41fd-962b-bf11c93d39c6 req-e044d179-518a-4a42-b2f6-18a281f40164 service nova] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Received event network-vif-unplugged-13fb40b1-132b-407d-b6e0-eec141ae88a8 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1715.422874] env[62510]: DEBUG oslo_concurrency.lockutils [req-9d8e4f4e-06a9-41fd-962b-bf11c93d39c6 req-e044d179-518a-4a42-b2f6-18a281f40164 service nova] Acquiring lock "e7daad63-c802-4a86-bead-7e849064ed61-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1715.424273] env[62510]: DEBUG oslo_concurrency.lockutils [req-9d8e4f4e-06a9-41fd-962b-bf11c93d39c6 req-e044d179-518a-4a42-b2f6-18a281f40164 service nova] Lock "e7daad63-c802-4a86-bead-7e849064ed61-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1715.424273] env[62510]: DEBUG oslo_concurrency.lockutils [req-9d8e4f4e-06a9-41fd-962b-bf11c93d39c6 req-e044d179-518a-4a42-b2f6-18a281f40164 service nova] Lock "e7daad63-c802-4a86-bead-7e849064ed61-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1715.424478] env[62510]: DEBUG nova.compute.manager [req-9d8e4f4e-06a9-41fd-962b-bf11c93d39c6 req-e044d179-518a-4a42-b2f6-18a281f40164 service nova] [instance: e7daad63-c802-4a86-bead-7e849064ed61] No waiting events found dispatching network-vif-unplugged-13fb40b1-132b-407d-b6e0-eec141ae88a8 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1715.424545] env[62510]: WARNING nova.compute.manager [req-9d8e4f4e-06a9-41fd-962b-bf11c93d39c6 req-e044d179-518a-4a42-b2f6-18a281f40164 service nova] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Received unexpected event network-vif-unplugged-13fb40b1-132b-407d-b6e0-eec141ae88a8 for instance with vm_state shelved and task_state shelving_offloading. 
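The paired "Acquiring lock ... by ...", "acquired ... :: waited N s" and "released ... :: held N s" lines throughout this trace are emitted by oslo.concurrency's lock wrapper (the inner frames at lockutils.py:402/407/421). Below is a rough sketch of how that per-instance serialization is typically expressed with lockutils.synchronized; the event dict and function are hypothetical illustrations, not Nova's actual InstanceEvents code.

```python
import logging

from oslo_concurrency import lockutils

# DEBUG logging makes the "Acquiring lock ..." / "acquired" / "released"
# messages from the lockutils wrapper visible, as in the service log above.
logging.basicConfig(level=logging.DEBUG)

_pending_events = {}  # hypothetical per-process event store


@lockutils.synchronized("e7daad63-c802-4a86-bead-7e849064ed61-events")
def pop_instance_event(event_name):
    """Pop a queued external event while holding the per-instance events lock."""
    return _pending_events.pop(event_name, None)


if __name__ == "__main__":
    _pending_events["network-vif-unplugged-13fb40b1-132b-407d-b6e0-eec141ae88a8"] = object()
    pop_instance_event("network-vif-unplugged-13fb40b1-132b-407d-b6e0-eec141ae88a8")
```

The lock name here reuses the "<instance uuid>-events" string seen in the log; any decorated callable would produce the same acquire/release DEBUG pairs.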
[ 1715.448195] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1715.449275] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10daa767-2ee9-485a-9a49-6f4a63b706de {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.459086] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1715.459475] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b1512cd7-f704-4411-b55f-69967d7bc3c7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.544994] env[62510]: DEBUG nova.scheduler.client.report [None req-948095a4-a52a-404e-bfaf-f29508e7b9a2 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1715.580688] env[62510]: DEBUG oslo_vmware.rw_handles [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52d9520c-5dca-6725-70da-7c007cc140f4/disk-0.vmdk. 
{{(pid=62510) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1715.581090] env[62510]: INFO nova.virt.vmwareapi.images [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Downloaded image file data b54fc7f8-0408-41bd-abcd-6c673fa40237 [ 1715.582966] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e92a29f0-5bd5-43ed-810e-a7ad878ee516 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.599599] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-134b1e25-9dbe-4ded-b40a-651da18454bb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.632051] env[62510]: INFO nova.virt.vmwareapi.images [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] The imported VM was unregistered [ 1715.634729] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Caching image {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1715.635010] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Creating directory with path [datastore1] devstack-image-cache_base/b54fc7f8-0408-41bd-abcd-6c673fa40237 {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1715.635299] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b67bc78d-8e59-4d36-9d6c-3a11c2306989 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.689923] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1715.690241] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1715.690429] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Deleting the datastore file [datastore1] e7daad63-c802-4a86-bead-7e849064ed61 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1715.690771] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-b9f79cfc-4157-4746-8a63-3ad9cff154c9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.699726] env[62510]: DEBUG oslo_vmware.api [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for the task: (returnval){ [ 1715.699726] env[62510]: value = "task-1769011" [ 1715.699726] env[62510]: _type = "Task" [ 1715.699726] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1715.701367] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Created directory with path [datastore1] devstack-image-cache_base/b54fc7f8-0408-41bd-abcd-6c673fa40237 {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1715.701607] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_484765bc-1828-4845-8ce5-3c9a93690de8/OSTACK_IMG_484765bc-1828-4845-8ce5-3c9a93690de8.vmdk to [datastore1] devstack-image-cache_base/b54fc7f8-0408-41bd-abcd-6c673fa40237/b54fc7f8-0408-41bd-abcd-6c673fa40237.vmdk. {{(pid=62510) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1715.705178] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-c816c1d4-2d52-48ce-9bd9-fec02267613e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.714188] env[62510]: DEBUG oslo_vmware.api [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1769011, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.714383] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1715.714383] env[62510]: value = "task-1769012" [ 1715.714383] env[62510]: _type = "Task" [ 1715.714383] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1715.723575] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769012, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.832708] env[62510]: DEBUG oslo_vmware.api [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769008, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.912281] env[62510]: DEBUG nova.compute.manager [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1715.940405] env[62510]: DEBUG nova.virt.hardware [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1715.940667] env[62510]: DEBUG nova.virt.hardware [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1715.940822] env[62510]: DEBUG nova.virt.hardware [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1715.941010] env[62510]: DEBUG nova.virt.hardware [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1715.941262] env[62510]: DEBUG nova.virt.hardware [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1715.941451] env[62510]: DEBUG nova.virt.hardware [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1715.941700] env[62510]: DEBUG nova.virt.hardware [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1715.941896] env[62510]: DEBUG nova.virt.hardware [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1715.942116] env[62510]: DEBUG nova.virt.hardware [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1715.942323] env[62510]: DEBUG nova.virt.hardware [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1715.942540] env[62510]: DEBUG nova.virt.hardware [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1715.943449] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d354bcf-3f4d-478a-bbef-d110bf679a2a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.954290] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f57f92b-8b52-488d-9e80-1340b8f20e42 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.050537] env[62510]: DEBUG oslo_concurrency.lockutils [None req-948095a4-a52a-404e-bfaf-f29508e7b9a2 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.166s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1716.053302] env[62510]: DEBUG oslo_concurrency.lockutils [None req-512731e2-08e7-44f1-8b0a-853091c2970e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.595s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1716.053302] env[62510]: DEBUG nova.objects.instance [None req-512731e2-08e7-44f1-8b0a-853091c2970e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Lazy-loading 'resources' on Instance uuid d1c20183-ba24-4a11-ad82-bf240d581322 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1716.080578] env[62510]: INFO nova.scheduler.client.report [None req-948095a4-a52a-404e-bfaf-f29508e7b9a2 tempest-ServerActionsTestOtherA-606697029 
tempest-ServerActionsTestOtherA-606697029-project-member] Deleted allocations for instance 90869287-22bd-438c-8684-56f5d43e3ca8 [ 1716.214413] env[62510]: DEBUG oslo_vmware.api [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1769011, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.224323] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769012, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.315337] env[62510]: DEBUG nova.compute.manager [req-6591e08a-4141-4e6c-abbd-c284aea32595 req-8904a1e3-abdb-43e8-8656-99fb33a7fc36 service nova] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Received event network-vif-plugged-ac9385ca-72de-486a-9901-44c6387f7c86 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1716.315554] env[62510]: DEBUG oslo_concurrency.lockutils [req-6591e08a-4141-4e6c-abbd-c284aea32595 req-8904a1e3-abdb-43e8-8656-99fb33a7fc36 service nova] Acquiring lock "2dce738b-9624-4a74-8b8c-042e45b693b0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1716.315812] env[62510]: DEBUG oslo_concurrency.lockutils [req-6591e08a-4141-4e6c-abbd-c284aea32595 req-8904a1e3-abdb-43e8-8656-99fb33a7fc36 service nova] Lock "2dce738b-9624-4a74-8b8c-042e45b693b0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1716.316069] env[62510]: DEBUG oslo_concurrency.lockutils [req-6591e08a-4141-4e6c-abbd-c284aea32595 req-8904a1e3-abdb-43e8-8656-99fb33a7fc36 service nova] Lock "2dce738b-9624-4a74-8b8c-042e45b693b0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1716.316176] env[62510]: DEBUG nova.compute.manager [req-6591e08a-4141-4e6c-abbd-c284aea32595 req-8904a1e3-abdb-43e8-8656-99fb33a7fc36 service nova] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] No waiting events found dispatching network-vif-plugged-ac9385ca-72de-486a-9901-44c6387f7c86 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1716.316848] env[62510]: WARNING nova.compute.manager [req-6591e08a-4141-4e6c-abbd-c284aea32595 req-8904a1e3-abdb-43e8-8656-99fb33a7fc36 service nova] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Received unexpected event network-vif-plugged-ac9385ca-72de-486a-9901-44c6387f7c86 for instance with vm_state building and task_state spawning. [ 1716.332914] env[62510]: DEBUG oslo_vmware.api [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769008, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.415715] env[62510]: DEBUG nova.network.neutron [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Successfully updated port: ac9385ca-72de-486a-9901-44c6387f7c86 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1716.593429] env[62510]: DEBUG oslo_concurrency.lockutils [None req-948095a4-a52a-404e-bfaf-f29508e7b9a2 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "90869287-22bd-438c-8684-56f5d43e3ca8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.781s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1716.715821] env[62510]: DEBUG oslo_vmware.api [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1769011, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.730907] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769012, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.832918] env[62510]: DEBUG oslo_vmware.api [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769008, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.920582] env[62510]: DEBUG oslo_concurrency.lockutils [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Acquiring lock "refresh_cache-2dce738b-9624-4a74-8b8c-042e45b693b0" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1716.920821] env[62510]: DEBUG oslo_concurrency.lockutils [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Acquired lock "refresh_cache-2dce738b-9624-4a74-8b8c-042e45b693b0" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1716.920971] env[62510]: DEBUG nova.network.neutron [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1716.946127] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5a478eb-93b7-48ae-b66b-3af8da97d88c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.955213] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5548b370-3b73-45f6-998e-748e381297a9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.990531] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-369e46f5-70f5-4d77-933f-a52c5bd2fb1a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.999681] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2393d74-7ed7-4142-99d6-6f1dd9f743fe {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.014547] env[62510]: DEBUG nova.compute.provider_tree [None req-512731e2-08e7-44f1-8b0a-853091c2970e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1717.215911] env[62510]: DEBUG oslo_vmware.api [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1769011, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.225642] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769012, 'name': MoveVirtualDisk_Task} progress is 0%. 
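The recurring "_poll_task ... progress is N%" lines (tasks 1769008, 1769011, 1769012) come from polling vCenter tasks roughly every half second until they finish. A minimal sketch of that pattern, assuming a hypothetical get_task_info callable returning an object with state, progress and error attributes; this is not oslo.vmware's implementation:

    import time

    def wait_for_vcenter_task(get_task_info, interval=0.5):
        """Poll a task until it reaches a terminal state (illustrative only)."""
        while True:
            info = get_task_info()
            if info.state == "success":
                return info
            if info.state == "error":
                raise RuntimeError("task failed: %r" % (info.error,))
            # The DEBUG lines above are emitted once per poll with the
            # progress figure reported by vCenter.
            print("progress is %s%%" % (info.progress or 0))
            time.sleep(interval)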
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.334932] env[62510]: DEBUG oslo_vmware.api [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769008, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.459152] env[62510]: DEBUG nova.compute.manager [req-538898ff-78c0-403a-9066-10df664a6eb4 req-4e9c21ff-62d1-4d5e-ac92-19c7379660ec service nova] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Received event network-changed-13fb40b1-132b-407d-b6e0-eec141ae88a8 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1717.459538] env[62510]: DEBUG nova.compute.manager [req-538898ff-78c0-403a-9066-10df664a6eb4 req-4e9c21ff-62d1-4d5e-ac92-19c7379660ec service nova] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Refreshing instance network info cache due to event network-changed-13fb40b1-132b-407d-b6e0-eec141ae88a8. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1717.459755] env[62510]: DEBUG oslo_concurrency.lockutils [req-538898ff-78c0-403a-9066-10df664a6eb4 req-4e9c21ff-62d1-4d5e-ac92-19c7379660ec service nova] Acquiring lock "refresh_cache-e7daad63-c802-4a86-bead-7e849064ed61" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1717.459922] env[62510]: DEBUG oslo_concurrency.lockutils [req-538898ff-78c0-403a-9066-10df664a6eb4 req-4e9c21ff-62d1-4d5e-ac92-19c7379660ec service nova] Acquired lock "refresh_cache-e7daad63-c802-4a86-bead-7e849064ed61" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1717.460218] env[62510]: DEBUG nova.network.neutron [req-538898ff-78c0-403a-9066-10df664a6eb4 req-4e9c21ff-62d1-4d5e-ac92-19c7379660ec service nova] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Refreshing network info cache for port 13fb40b1-132b-407d-b6e0-eec141ae88a8 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1717.477286] env[62510]: DEBUG nova.network.neutron [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Instance cache missing network info. 
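The 'Acquiring/Acquired lock "refresh_cache-<instance uuid>"' lines above show network-info cache refreshes being serialized per instance, so the spawning thread and the external-event handler do not race on the same cache entry. A sketch of that shape using oslo_concurrency's lock context manager; refresh_nw_cache and do_refresh are hypothetical names, not Nova's API:

    from oslo_concurrency import lockutils

    def refresh_nw_cache(instance_uuid, do_refresh):
        # One named lock per instance, mirroring the "refresh_cache-<uuid>"
        # lock names in the log; whoever holds it rebuilds the cache.
        with lockutils.lock("refresh_cache-%s" % instance_uuid):
            return do_refresh(instance_uuid)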
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1717.518708] env[62510]: DEBUG nova.scheduler.client.report [None req-512731e2-08e7-44f1-8b0a-853091c2970e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1717.658076] env[62510]: DEBUG nova.network.neutron [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Updating instance_info_cache with network_info: [{"id": "ac9385ca-72de-486a-9901-44c6387f7c86", "address": "fa:16:3e:17:a4:43", "network": {"id": "eacdaedf-3a2d-4349-b143-cb9b1d95c822", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1418315988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68579c8354b4431e8ec51575cda77325", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "26472e27-9835-4f87-ab7f-ca24dfee4e83", "external-id": "nsx-vlan-transportzone-335", "segmentation_id": 335, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac9385ca-72", "ovs_interfaceid": "ac9385ca-72de-486a-9901-44c6387f7c86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1717.723507] env[62510]: DEBUG oslo_vmware.api [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1769011, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.783955} completed successfully. 
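The inventory reported for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 just above can be sanity-checked with Placement's usual capacity rule, usable = (total - reserved) * allocation_ratio. The figures below are copied from that log line; the snippet is only a back-of-the-envelope check:

    # Capacity per resource class as Placement computes it:
    # (total - reserved) * allocation_ratio.
    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
    }
    for rc, inv in inventory.items():
        usable = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(rc, usable)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0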
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1717.726329] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1717.726519] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1717.726714] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1717.735823] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769012, 'name': MoveVirtualDisk_Task} progress is 15%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.770255] env[62510]: INFO nova.scheduler.client.report [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Deleted allocations for instance e7daad63-c802-4a86-bead-7e849064ed61 [ 1717.836581] env[62510]: DEBUG oslo_vmware.api [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769008, 'name': ExtendVirtualDisk_Task, 'duration_secs': 2.037071} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1717.837078] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1717.838073] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80a73136-2cde-41bd-81d5-bec71bd159ff {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.866952] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] b7ffe11f-2f63-419b-9ad8-0a89a05d201c/b7ffe11f-2f63-419b-9ad8-0a89a05d201c.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1717.866952] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f40517d7-781a-4267-996c-2f2130d5c556 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.889902] env[62510]: DEBUG oslo_vmware.api [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1717.889902] env[62510]: value = "task-1769013" [ 1717.889902] env[62510]: _type = "Task" [ 1717.889902] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1717.904957] env[62510]: DEBUG oslo_vmware.api [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769013, 'name': ReconfigVM_Task} progress is 6%. 
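The datastore references in the attach-disk lines follow one convention: "[<datastore>] <instance uuid>/<instance uuid>.vmdk". A trivial helper spelling that out; the function name is made up for illustration and is not part of Nova's ds_util:

    def instance_vmdk_path(datastore, instance_uuid):
        # e.g. "[datastore1] b7ffe11f-.../b7ffe11f-....vmdk", as in the
        # ReconfigVM_Task lines above.
        return "[%s] %s/%s.vmdk" % (datastore, instance_uuid, instance_uuid)

    print(instance_vmdk_path("datastore1",
                             "b7ffe11f-2f63-419b-9ad8-0a89a05d201c"))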
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.023870] env[62510]: DEBUG oslo_concurrency.lockutils [None req-512731e2-08e7-44f1-8b0a-853091c2970e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.971s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1718.028352] env[62510]: DEBUG oslo_concurrency.lockutils [None req-65fee01b-a198-4390-b55a-0f19aaaa3cd1 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.584s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1718.028352] env[62510]: DEBUG nova.objects.instance [None req-65fee01b-a198-4390-b55a-0f19aaaa3cd1 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Lazy-loading 'resources' on Instance uuid 31fe5643-dece-484f-92d6-7c7cafbd51e4 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1718.051107] env[62510]: INFO nova.scheduler.client.report [None req-512731e2-08e7-44f1-8b0a-853091c2970e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Deleted allocations for instance d1c20183-ba24-4a11-ad82-bf240d581322 [ 1718.164376] env[62510]: DEBUG oslo_concurrency.lockutils [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Releasing lock "refresh_cache-2dce738b-9624-4a74-8b8c-042e45b693b0" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1718.164752] env[62510]: DEBUG nova.compute.manager [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Instance network_info: |[{"id": "ac9385ca-72de-486a-9901-44c6387f7c86", "address": "fa:16:3e:17:a4:43", "network": {"id": "eacdaedf-3a2d-4349-b143-cb9b1d95c822", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1418315988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68579c8354b4431e8ec51575cda77325", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "26472e27-9835-4f87-ab7f-ca24dfee4e83", "external-id": "nsx-vlan-transportzone-335", "segmentation_id": 335, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac9385ca-72", "ovs_interfaceid": "ac9385ca-72de-486a-9901-44c6387f7c86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1718.165964] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:a4:43', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '26472e27-9835-4f87-ab7f-ca24dfee4e83', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ac9385ca-72de-486a-9901-44c6387f7c86', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1718.175180] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Creating folder: Project (68579c8354b4431e8ec51575cda77325). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1718.176802] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2b2409c5-fc9f-4a49-be6d-0ffb16898082 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.191279] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Created folder: Project (68579c8354b4431e8ec51575cda77325) in parent group-v367197. [ 1718.191590] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Creating folder: Instances. Parent ref: group-v367392. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1718.192053] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a3c3f5a3-53e7-4779-aba1-9c6ca6ac903b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.204473] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Created folder: Instances in parent group-v367392. [ 1718.204907] env[62510]: DEBUG oslo.service.loopingcall [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
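The "Instance network_info" and "Instance VIF info" lines above show the Neutron port dict being reduced to the handful of fields the VMware driver needs for a vmxnet3 NIC on an NSX opaque network. A sketch of that mapping with field names taken straight from the log; the helper itself is hypothetical, not the driver's real VIF code:

    def to_vif_info(port):
        # port is one entry of the network_info list logged above.
        return {
            "network_name": port["network"]["bridge"],    # 'br-int'
            "mac_address": port["address"],               # 'fa:16:3e:17:a4:43'
            "iface_id": port["id"],                       # Neutron port UUID
            "vif_model": "vmxnet3",
            "network_ref": {
                "type": "OpaqueNetwork",
                "network-id": port["details"]["nsx-logical-switch-id"],
                "network-type": "nsx.LogicalSwitch",
                "use-external-id": True,
            },
        }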
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1718.205164] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1718.205395] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c72cab8d-c6bd-4041-b69e-5b4a1db64754 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.235031] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769012, 'name': MoveVirtualDisk_Task} progress is 35%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.236303] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1718.236303] env[62510]: value = "task-1769016" [ 1718.236303] env[62510]: _type = "Task" [ 1718.236303] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1718.245330] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769016, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.277170] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1718.354214] env[62510]: DEBUG nova.compute.manager [req-5cb4a463-6771-478d-98fb-bc46f8c56ebf req-84279ec1-36a2-40eb-8cfe-1183e380a2b0 service nova] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Received event network-changed-ac9385ca-72de-486a-9901-44c6387f7c86 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1718.354627] env[62510]: DEBUG nova.compute.manager [req-5cb4a463-6771-478d-98fb-bc46f8c56ebf req-84279ec1-36a2-40eb-8cfe-1183e380a2b0 service nova] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Refreshing instance network info cache due to event network-changed-ac9385ca-72de-486a-9901-44c6387f7c86. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1718.354981] env[62510]: DEBUG oslo_concurrency.lockutils [req-5cb4a463-6771-478d-98fb-bc46f8c56ebf req-84279ec1-36a2-40eb-8cfe-1183e380a2b0 service nova] Acquiring lock "refresh_cache-2dce738b-9624-4a74-8b8c-042e45b693b0" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1718.355359] env[62510]: DEBUG oslo_concurrency.lockutils [req-5cb4a463-6771-478d-98fb-bc46f8c56ebf req-84279ec1-36a2-40eb-8cfe-1183e380a2b0 service nova] Acquired lock "refresh_cache-2dce738b-9624-4a74-8b8c-042e45b693b0" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1718.355678] env[62510]: DEBUG nova.network.neutron [req-5cb4a463-6771-478d-98fb-bc46f8c56ebf req-84279ec1-36a2-40eb-8cfe-1183e380a2b0 service nova] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Refreshing network info cache for port ac9385ca-72de-486a-9901-44c6387f7c86 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1718.380952] env[62510]: DEBUG nova.network.neutron [req-538898ff-78c0-403a-9066-10df664a6eb4 req-4e9c21ff-62d1-4d5e-ac92-19c7379660ec service nova] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Updated VIF entry in instance network info cache for port 13fb40b1-132b-407d-b6e0-eec141ae88a8. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1718.381364] env[62510]: DEBUG nova.network.neutron [req-538898ff-78c0-403a-9066-10df664a6eb4 req-4e9c21ff-62d1-4d5e-ac92-19c7379660ec service nova] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Updating instance_info_cache with network_info: [{"id": "13fb40b1-132b-407d-b6e0-eec141ae88a8", "address": "fa:16:3e:3b:f9:de", "network": {"id": "259e31c4-74f6-4d58-9f76-c7b34d594473", "bridge": null, "label": "tempest-ServersNegativeTestJSON-1218880601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f85ce3c02964d36a77221ba8235978c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap13fb40b1-13", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1718.407955] env[62510]: DEBUG oslo_vmware.api [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769013, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.561290] env[62510]: DEBUG oslo_concurrency.lockutils [None req-512731e2-08e7-44f1-8b0a-853091c2970e tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Lock "d1c20183-ba24-4a11-ad82-bf240d581322" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.930s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1718.739053] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769012, 'name': MoveVirtualDisk_Task} progress is 57%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.753032] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769016, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.886450] env[62510]: DEBUG oslo_concurrency.lockutils [req-538898ff-78c0-403a-9066-10df664a6eb4 req-4e9c21ff-62d1-4d5e-ac92-19c7379660ec service nova] Releasing lock "refresh_cache-e7daad63-c802-4a86-bead-7e849064ed61" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1718.909043] env[62510]: DEBUG oslo_vmware.api [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769013, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.928401] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2ff569c-bc79-452b-a54f-cefe3d9ceb51 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.944873] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55fbecc8-d7b6-46b9-836d-40ba29e776ee {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.987297] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29dbc45d-3145-4ea6-bbac-f74eda5cf977 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.996540] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d77ff095-d64c-45cf-9544-194d2795b1c0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.011887] env[62510]: DEBUG nova.compute.provider_tree [None req-65fee01b-a198-4390-b55a-0f19aaaa3cd1 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1719.158541] env[62510]: DEBUG nova.network.neutron [req-5cb4a463-6771-478d-98fb-bc46f8c56ebf req-84279ec1-36a2-40eb-8cfe-1183e380a2b0 service nova] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Updated VIF 
entry in instance network info cache for port ac9385ca-72de-486a-9901-44c6387f7c86. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1719.159203] env[62510]: DEBUG nova.network.neutron [req-5cb4a463-6771-478d-98fb-bc46f8c56ebf req-84279ec1-36a2-40eb-8cfe-1183e380a2b0 service nova] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Updating instance_info_cache with network_info: [{"id": "ac9385ca-72de-486a-9901-44c6387f7c86", "address": "fa:16:3e:17:a4:43", "network": {"id": "eacdaedf-3a2d-4349-b143-cb9b1d95c822", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1418315988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68579c8354b4431e8ec51575cda77325", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "26472e27-9835-4f87-ab7f-ca24dfee4e83", "external-id": "nsx-vlan-transportzone-335", "segmentation_id": 335, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac9385ca-72", "ovs_interfaceid": "ac9385ca-72de-486a-9901-44c6387f7c86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1719.236771] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769012, 'name': MoveVirtualDisk_Task} progress is 77%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.252025] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769016, 'name': CreateVM_Task, 'duration_secs': 0.668515} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.252025] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1719.252025] env[62510]: DEBUG oslo_concurrency.lockutils [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1719.252025] env[62510]: DEBUG oslo_concurrency.lockutils [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1719.252025] env[62510]: DEBUG oslo_concurrency.lockutils [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1719.252025] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7e7ec1b-2e0b-4923-b176-0e5e5201e692 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.259963] env[62510]: DEBUG oslo_vmware.api [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for the task: (returnval){ [ 1719.259963] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5260a51f-609f-a7af-8fc5-1728885bc807" [ 1719.259963] env[62510]: _type = "Task" [ 1719.259963] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.269383] env[62510]: DEBUG oslo_vmware.api [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5260a51f-609f-a7af-8fc5-1728885bc807, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.404109] env[62510]: DEBUG oslo_vmware.api [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769013, 'name': ReconfigVM_Task, 'duration_secs': 1.027042} completed successfully. 
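The lock on "[datastore1] devstack-image-cache_base/645af513-..." followed by a SearchDatastore_Task is the image-cache check: the cached path doubles as the lock name so concurrent spawns of the same image serialize, probe whether the cached VMDK already exists, and only then copy it into the instance folder. A sketch of that flow; search_datastore, fetch_image and copy_virtual_disk are hypothetical stand-ins for the vCenter calls, and this is not the real _fetch_image_if_missing:

    from oslo_concurrency import lockutils

    def ensure_cached_then_copy(image_id, instance_uuid, search_datastore,
                                fetch_image, copy_virtual_disk,
                                datastore="datastore1"):
        cache_path = "[%s] devstack-image-cache_base/%s/%s.vmdk" % (
            datastore, image_id, image_id)
        with lockutils.lock(cache_path):
            if not search_datastore(cache_path):
                fetch_image(image_id, cache_path)      # miss: download once
        dest = "[%s] %s/%s.vmdk" % (datastore, instance_uuid, instance_uuid)
        copy_virtual_disk(cache_path, dest)            # CopyVirtualDisk_Task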
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.404449] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Reconfigured VM instance instance-00000049 to attach disk [datastore1] b7ffe11f-2f63-419b-9ad8-0a89a05d201c/b7ffe11f-2f63-419b-9ad8-0a89a05d201c.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1719.405112] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3546b281-6535-43da-baed-f44f15c224a7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.413439] env[62510]: DEBUG oslo_vmware.api [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1719.413439] env[62510]: value = "task-1769017" [ 1719.413439] env[62510]: _type = "Task" [ 1719.413439] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.422744] env[62510]: DEBUG oslo_vmware.api [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769017, 'name': Rename_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.517547] env[62510]: DEBUG nova.scheduler.client.report [None req-65fee01b-a198-4390-b55a-0f19aaaa3cd1 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1719.603953] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "bc474f8b-dd3b-4d7a-a8e0-fea5570b3091" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1719.603953] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "bc474f8b-dd3b-4d7a-a8e0-fea5570b3091" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1719.663078] env[62510]: DEBUG oslo_concurrency.lockutils [req-5cb4a463-6771-478d-98fb-bc46f8c56ebf req-84279ec1-36a2-40eb-8cfe-1183e380a2b0 service nova] 
Releasing lock "refresh_cache-2dce738b-9624-4a74-8b8c-042e45b693b0" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1719.742994] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769012, 'name': MoveVirtualDisk_Task} progress is 97%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.769861] env[62510]: DEBUG oslo_vmware.api [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5260a51f-609f-a7af-8fc5-1728885bc807, 'name': SearchDatastore_Task, 'duration_secs': 0.092188} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.770470] env[62510]: DEBUG oslo_concurrency.lockutils [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1719.770684] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1719.770979] env[62510]: DEBUG oslo_concurrency.lockutils [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1719.771224] env[62510]: DEBUG oslo_concurrency.lockutils [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1719.771479] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1719.771816] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e16a0ed2-bc32-4937-8d0f-8b10d87dda84 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.781805] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-35e01aef-f686-433f-8468-1f5dce7b2794 
tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1719.782055] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1719.782869] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-576b5564-083a-4726-b965-07b3ecf9c2f6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.789080] env[62510]: DEBUG oslo_vmware.api [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for the task: (returnval){ [ 1719.789080] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]528801a3-021f-31f7-a8c6-c434f5b4f3f6" [ 1719.789080] env[62510]: _type = "Task" [ 1719.789080] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.798399] env[62510]: DEBUG oslo_vmware.api [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]528801a3-021f-31f7-a8c6-c434f5b4f3f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.924458] env[62510]: DEBUG oslo_vmware.api [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769017, 'name': Rename_Task, 'duration_secs': 0.384336} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.924718] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1719.924984] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-728e6f65-b7ea-4eb8-8198-5e4b63bcece3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.931713] env[62510]: DEBUG oslo_vmware.api [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1719.931713] env[62510]: value = "task-1769018" [ 1719.931713] env[62510]: _type = "Task" [ 1719.931713] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.940428] env[62510]: DEBUG oslo_vmware.api [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769018, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.023340] env[62510]: DEBUG oslo_concurrency.lockutils [None req-65fee01b-a198-4390-b55a-0f19aaaa3cd1 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.995s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1720.025832] env[62510]: DEBUG oslo_concurrency.lockutils [None req-34f455cb-1a0f-437b-a4e7-5ced4fbd34b6 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.441s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1720.026054] env[62510]: DEBUG oslo_concurrency.lockutils [None req-34f455cb-1a0f-437b-a4e7-5ced4fbd34b6 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1720.027828] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.105s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1720.030022] env[62510]: INFO nova.compute.claims [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1720.048428] env[62510]: INFO nova.scheduler.client.report [None req-65fee01b-a198-4390-b55a-0f19aaaa3cd1 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Deleted allocations for instance 31fe5643-dece-484f-92d6-7c7cafbd51e4 [ 1720.057366] env[62510]: INFO nova.scheduler.client.report [None req-34f455cb-1a0f-437b-a4e7-5ced4fbd34b6 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Deleted allocations for instance fae7e580-ab09-4fda-9cbe-0e066ddcb85c [ 1720.106959] env[62510]: DEBUG nova.compute.manager [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1720.242417] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769012, 'name': MoveVirtualDisk_Task, 'duration_secs': 4.101055} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1720.242803] env[62510]: INFO nova.virt.vmwareapi.ds_util [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_484765bc-1828-4845-8ce5-3c9a93690de8/OSTACK_IMG_484765bc-1828-4845-8ce5-3c9a93690de8.vmdk to [datastore1] devstack-image-cache_base/b54fc7f8-0408-41bd-abcd-6c673fa40237/b54fc7f8-0408-41bd-abcd-6c673fa40237.vmdk. [ 1720.243038] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Cleaning up location [datastore1] OSTACK_IMG_484765bc-1828-4845-8ce5-3c9a93690de8 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1720.243549] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_484765bc-1828-4845-8ce5-3c9a93690de8 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1720.243549] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-253a200d-3035-4da8-9822-55c6eab24f80 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.251602] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1720.251602] env[62510]: value = "task-1769019" [ 1720.251602] env[62510]: _type = "Task" [ 1720.251602] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.261544] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769019, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.300767] env[62510]: DEBUG oslo_vmware.api [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]528801a3-021f-31f7-a8c6-c434f5b4f3f6, 'name': SearchDatastore_Task, 'duration_secs': 0.012173} completed successfully. 
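Several lockutils lines in this section report long waits on "compute_resources" (39.595s, 34.441s, 33.584s, 29.105s), which is worth flagging when reading a run like this. A small self-contained helper for pulling those figures out of the log; the regex matches the exact oslo_concurrency phrasing above:

    import re

    WAIT_RE = re.compile(
        r'Lock "([^"]+)" acquired by "[^"]+" :: waited ([0-9.]+)s')

    def worst_lock_waits(lines):
        """Return the longest observed wait per lock name."""
        waits = {}
        for line in lines:
            for name, secs in WAIT_RE.findall(line):
                waits[name] = max(waits.get(name, 0.0), float(secs))
        return waits

    # e.g. worst_lock_waits(open("nova-compute.log"))
    # -> {'compute_resources': 39.595, ...}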
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1720.301598] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-404d080f-392e-4554-a216-29711091ac1f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.307995] env[62510]: DEBUG oslo_vmware.api [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for the task: (returnval){ [ 1720.307995] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5243d2cb-635e-2ee9-865c-f3cf862c68c7" [ 1720.307995] env[62510]: _type = "Task" [ 1720.307995] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.318251] env[62510]: DEBUG oslo_vmware.api [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5243d2cb-635e-2ee9-865c-f3cf862c68c7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.443877] env[62510]: DEBUG oslo_vmware.api [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769018, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.555162] env[62510]: DEBUG oslo_concurrency.lockutils [None req-65fee01b-a198-4390-b55a-0f19aaaa3cd1 tempest-ServersNegativeTestMultiTenantJSON-626847861 tempest-ServersNegativeTestMultiTenantJSON-626847861-project-member] Lock "31fe5643-dece-484f-92d6-7c7cafbd51e4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.906s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1720.563482] env[62510]: DEBUG oslo_concurrency.lockutils [None req-34f455cb-1a0f-437b-a4e7-5ced4fbd34b6 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Lock "fae7e580-ab09-4fda-9cbe-0e066ddcb85c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.912s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1720.627665] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1720.762311] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769019, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.040552} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1720.762566] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1720.762862] env[62510]: DEBUG oslo_concurrency.lockutils [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b54fc7f8-0408-41bd-abcd-6c673fa40237/b54fc7f8-0408-41bd-abcd-6c673fa40237.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1720.763071] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b54fc7f8-0408-41bd-abcd-6c673fa40237/b54fc7f8-0408-41bd-abcd-6c673fa40237.vmdk to [datastore1] 83fa0d32-18ee-401d-af0b-a0adb538e5f4/83fa0d32-18ee-401d-af0b-a0adb538e5f4.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1720.763349] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-55983eea-a06c-4f06-a312-e8d6f3ed55b5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.771071] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1720.771071] env[62510]: value = "task-1769020" [ 1720.771071] env[62510]: _type = "Task" [ 1720.771071] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.779657] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769020, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.820072] env[62510]: DEBUG oslo_vmware.api [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5243d2cb-635e-2ee9-865c-f3cf862c68c7, 'name': SearchDatastore_Task, 'duration_secs': 0.011763} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1720.820374] env[62510]: DEBUG oslo_concurrency.lockutils [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1720.820650] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 2dce738b-9624-4a74-8b8c-042e45b693b0/2dce738b-9624-4a74-8b8c-042e45b693b0.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1720.820910] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ffad29af-f655-4eee-906b-267268db5650 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.829854] env[62510]: DEBUG oslo_vmware.api [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for the task: (returnval){ [ 1720.829854] env[62510]: value = "task-1769021" [ 1720.829854] env[62510]: _type = "Task" [ 1720.829854] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.838236] env[62510]: DEBUG oslo_vmware.api [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769021, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.847839] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Acquiring lock "e7daad63-c802-4a86-bead-7e849064ed61" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1720.942503] env[62510]: DEBUG oslo_vmware.api [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769018, 'name': PowerOnVM_Task, 'duration_secs': 0.524402} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1720.942791] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1720.943021] env[62510]: INFO nova.compute.manager [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Took 12.68 seconds to spawn the instance on the hypervisor. [ 1720.943208] env[62510]: DEBUG nova.compute.manager [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1720.944039] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c056f18-0576-42c3-957b-21f1123750bd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.044255] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7cb054d8-944c-43c2-9cac-cc1621370fc5 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Acquiring lock "e3850272-9dae-4164-8f0e-f5513af23f49" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1721.044255] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7cb054d8-944c-43c2-9cac-cc1621370fc5 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Lock "e3850272-9dae-4164-8f0e-f5513af23f49" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1721.045117] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7cb054d8-944c-43c2-9cac-cc1621370fc5 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Acquiring lock "e3850272-9dae-4164-8f0e-f5513af23f49-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1721.045117] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7cb054d8-944c-43c2-9cac-cc1621370fc5 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Lock "e3850272-9dae-4164-8f0e-f5513af23f49-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1721.045117] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7cb054d8-944c-43c2-9cac-cc1621370fc5 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Lock "e3850272-9dae-4164-8f0e-f5513af23f49-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s 
{{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1721.047570] env[62510]: INFO nova.compute.manager [None req-7cb054d8-944c-43c2-9cac-cc1621370fc5 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Terminating instance [ 1721.287297] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769020, 'name': CopyVirtualDisk_Task} progress is 18%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.347723] env[62510]: DEBUG oslo_vmware.api [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769021, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.464551] env[62510]: INFO nova.compute.manager [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Took 56.73 seconds to build instance. [ 1721.468662] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5f8bf6e-478b-4c0a-a743-66974d03b1e9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.479190] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7faa553-dabb-4b84-af97-cfb71ed9bd4f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.520972] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c2e7837-bb0c-4c9e-91a0-785df1dfba47 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.531132] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-663b1e7c-6df1-4bf7-9b47-d7f32cd573c9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.550723] env[62510]: DEBUG nova.compute.provider_tree [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1721.552764] env[62510]: DEBUG nova.compute.manager [None req-7cb054d8-944c-43c2-9cac-cc1621370fc5 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1721.552896] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-7cb054d8-944c-43c2-9cac-cc1621370fc5 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1721.556433] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-614a28a1-4340-423e-a7c2-e3191a209882 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.562583] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cb054d8-944c-43c2-9cac-cc1621370fc5 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1721.563481] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-092216ae-c002-4b6d-b99c-a5e2fc5b1246 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.573656] env[62510]: DEBUG oslo_vmware.api [None req-7cb054d8-944c-43c2-9cac-cc1621370fc5 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Waiting for the task: (returnval){ [ 1721.573656] env[62510]: value = "task-1769022" [ 1721.573656] env[62510]: _type = "Task" [ 1721.573656] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.585422] env[62510]: DEBUG oslo_vmware.api [None req-7cb054d8-944c-43c2-9cac-cc1621370fc5 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': task-1769022, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.783192] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769020, 'name': CopyVirtualDisk_Task} progress is 32%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.842836] env[62510]: DEBUG oslo_vmware.api [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769021, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.967253] env[62510]: DEBUG oslo_concurrency.lockutils [None req-693f0727-4aa2-4699-8e9a-1c74d1142824 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "b7ffe11f-2f63-419b-9ad8-0a89a05d201c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.241s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1722.058904] env[62510]: DEBUG nova.scheduler.client.report [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1722.075291] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d2640428-ead9-4962-8344-9522d1848254 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquiring lock "12768001-6ed0-47be-8f20-c59ee82b842a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1722.075446] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d2640428-ead9-4962-8344-9522d1848254 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Lock "12768001-6ed0-47be-8f20-c59ee82b842a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1722.075960] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d2640428-ead9-4962-8344-9522d1848254 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquiring lock "12768001-6ed0-47be-8f20-c59ee82b842a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1722.076180] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d2640428-ead9-4962-8344-9522d1848254 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Lock "12768001-6ed0-47be-8f20-c59ee82b842a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1722.076359] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d2640428-ead9-4962-8344-9522d1848254 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Lock "12768001-6ed0-47be-8f20-c59ee82b842a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1722.078979] env[62510]: INFO nova.compute.manager [None req-d2640428-ead9-4962-8344-9522d1848254 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Terminating instance [ 1722.096719] env[62510]: DEBUG oslo_vmware.api [None req-7cb054d8-944c-43c2-9cac-cc1621370fc5 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': task-1769022, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.283900] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769020, 'name': CopyVirtualDisk_Task} progress is 40%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.347198] env[62510]: DEBUG oslo_vmware.api [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769021, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.567392] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.539s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1722.568011] env[62510]: DEBUG nova.compute.manager [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1722.570806] env[62510]: DEBUG oslo_concurrency.lockutils [None req-21f2fde3-b239-47e7-b8bb-6f25e275e184 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.203s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1722.571072] env[62510]: DEBUG oslo_concurrency.lockutils [None req-21f2fde3-b239-47e7-b8bb-6f25e275e184 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1722.573120] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24fee006-af04-4d12-b271-1c2c97912e87 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.317s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1722.574106] env[62510]: DEBUG nova.objects.instance [None req-24fee006-af04-4d12-b271-1c2c97912e87 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Lazy-loading 'resources' on Instance uuid c829d602-97bc-4ec8-9090-c63bed04ac79 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1722.596884] env[62510]: DEBUG nova.compute.manager [None req-d2640428-ead9-4962-8344-9522d1848254 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1722.597094] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d2640428-ead9-4962-8344-9522d1848254 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1722.597440] env[62510]: DEBUG oslo_vmware.api [None req-7cb054d8-944c-43c2-9cac-cc1621370fc5 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': task-1769022, 'name': PowerOffVM_Task, 'duration_secs': 0.701249} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.598588] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bcf0aed-2488-47bd-9d50-9037afb7df5d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.601534] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cb054d8-944c-43c2-9cac-cc1621370fc5 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1722.601725] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-7cb054d8-944c-43c2-9cac-cc1621370fc5 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1722.602338] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9985a563-6835-4b5e-a055-76503d5b0d41 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.612604] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2640428-ead9-4962-8344-9522d1848254 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1722.612930] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e14d8992-34c5-4f8d-b232-0a78db7fab8a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.623283] env[62510]: DEBUG oslo_vmware.api [None req-d2640428-ead9-4962-8344-9522d1848254 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Waiting for the task: (returnval){ [ 1722.623283] env[62510]: value = "task-1769024" [ 1722.623283] env[62510]: _type = "Task" [ 1722.623283] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.630546] env[62510]: INFO nova.scheduler.client.report [None req-21f2fde3-b239-47e7-b8bb-6f25e275e184 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Deleted allocations for instance 350d5f83-d9ce-4997-bf57-70c4a4e22ba0 [ 1722.635710] env[62510]: DEBUG oslo_vmware.api [None req-d2640428-ead9-4962-8344-9522d1848254 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1769024, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.789190] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769020, 'name': CopyVirtualDisk_Task} progress is 54%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.833327] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-7cb054d8-944c-43c2-9cac-cc1621370fc5 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1722.833759] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-7cb054d8-944c-43c2-9cac-cc1621370fc5 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1722.834076] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cb054d8-944c-43c2-9cac-cc1621370fc5 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Deleting the datastore file [datastore1] e3850272-9dae-4164-8f0e-f5513af23f49 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1722.834697] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4fe9aa54-771f-4f7e-b061-fa49c4c8cc36 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.855926] env[62510]: DEBUG oslo_vmware.api [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769021, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.003177} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.858582] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 2dce738b-9624-4a74-8b8c-042e45b693b0/2dce738b-9624-4a74-8b8c-042e45b693b0.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1722.858850] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1722.859245] env[62510]: DEBUG oslo_vmware.api [None req-7cb054d8-944c-43c2-9cac-cc1621370fc5 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Waiting for the task: (returnval){ [ 1722.859245] env[62510]: value = "task-1769025" [ 1722.859245] env[62510]: _type = "Task" [ 1722.859245] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.859464] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ec32bd8e-7dd7-4ab2-b631-adfa8645485e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.876692] env[62510]: DEBUG oslo_vmware.api [None req-7cb054d8-944c-43c2-9cac-cc1621370fc5 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': task-1769025, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.878784] env[62510]: DEBUG oslo_vmware.api [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for the task: (returnval){ [ 1722.878784] env[62510]: value = "task-1769026" [ 1722.878784] env[62510]: _type = "Task" [ 1722.878784] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.889630] env[62510]: DEBUG oslo_vmware.api [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769026, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.925022] env[62510]: DEBUG oslo_concurrency.lockutils [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "8b079310-084b-4ba0-8a82-57d64f421c11" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1722.925565] env[62510]: DEBUG oslo_concurrency.lockutils [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "8b079310-084b-4ba0-8a82-57d64f421c11" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1723.080022] env[62510]: DEBUG nova.compute.utils [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1723.088032] env[62510]: DEBUG nova.compute.manager [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1723.088032] env[62510]: DEBUG nova.network.neutron [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1723.137184] env[62510]: DEBUG nova.policy [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a406a2bf0ccd4b99ba7dcb359a9b640e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e144c0bd2d124193a65ad53de8c43039', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1723.147551] env[62510]: DEBUG oslo_vmware.api [None req-d2640428-ead9-4962-8344-9522d1848254 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1769024, 'name': PowerOffVM_Task, 'duration_secs': 0.315716} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1723.150703] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2640428-ead9-4962-8344-9522d1848254 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1723.150893] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d2640428-ead9-4962-8344-9522d1848254 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1723.151472] env[62510]: DEBUG oslo_concurrency.lockutils [None req-21f2fde3-b239-47e7-b8bb-6f25e275e184 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "350d5f83-d9ce-4997-bf57-70c4a4e22ba0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.759s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1723.152643] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-048b04ea-fe2c-43aa-87ef-b403fb071004 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.282092] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d2640428-ead9-4962-8344-9522d1848254 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1723.282663] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d2640428-ead9-4962-8344-9522d1848254 tempest-MigrationsAdminTest-857206338 
tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1723.282663] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2640428-ead9-4962-8344-9522d1848254 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Deleting the datastore file [datastore1] 12768001-6ed0-47be-8f20-c59ee82b842a {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1723.283815] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4c085a02-2fcd-49df-b50b-926dd34337ce {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.290645] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769020, 'name': CopyVirtualDisk_Task} progress is 74%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.298722] env[62510]: DEBUG oslo_vmware.api [None req-d2640428-ead9-4962-8344-9522d1848254 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Waiting for the task: (returnval){ [ 1723.298722] env[62510]: value = "task-1769028" [ 1723.298722] env[62510]: _type = "Task" [ 1723.298722] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1723.308755] env[62510]: DEBUG oslo_vmware.api [None req-d2640428-ead9-4962-8344-9522d1848254 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1769028, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.378030] env[62510]: DEBUG oslo_vmware.api [None req-7cb054d8-944c-43c2-9cac-cc1621370fc5 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': task-1769025, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.390155] env[62510]: DEBUG oslo_vmware.api [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769026, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.10252} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1723.390365] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1723.391455] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d196cb7b-070d-4bba-b056-0cbb9439a13a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.423015] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Reconfiguring VM instance instance-0000004a to attach disk [datastore1] 2dce738b-9624-4a74-8b8c-042e45b693b0/2dce738b-9624-4a74-8b8c-042e45b693b0.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1723.426691] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7c7fdbc5-3ec0-4162-95b3-9a79fd20417b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.443980] env[62510]: DEBUG nova.compute.manager [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1723.463596] env[62510]: DEBUG oslo_vmware.api [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for the task: (returnval){ [ 1723.463596] env[62510]: value = "task-1769029" [ 1723.463596] env[62510]: _type = "Task" [ 1723.463596] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1723.474936] env[62510]: DEBUG oslo_vmware.api [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769029, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.585313] env[62510]: DEBUG nova.compute.manager [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Start building block device mappings for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1723.690206] env[62510]: DEBUG nova.network.neutron [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Successfully created port: 9174aa7f-56a1-4625-be49-9a7f645e961b {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1723.727816] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ea22267-d636-4711-9917-878f464533d7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.737191] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c8bb1f5-8b37-4f7f-9738-0bb497bbaf60 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.775408] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9750668-bc76-4245-86d9-63f414a6c0ca {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.792051] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769020, 'name': CopyVirtualDisk_Task} progress is 94%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.794056] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3320f207-576d-450c-9c7f-0e7c320b06de {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.810504] env[62510]: DEBUG oslo_vmware.api [None req-d2640428-ead9-4962-8344-9522d1848254 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1769028, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.819167] env[62510]: DEBUG nova.compute.provider_tree [None req-24fee006-af04-4d12-b271-1c2c97912e87 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1723.875304] env[62510]: DEBUG oslo_vmware.api [None req-7cb054d8-944c-43c2-9cac-cc1621370fc5 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': task-1769025, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.977288] env[62510]: DEBUG oslo_vmware.api [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769029, 'name': ReconfigVM_Task, 'duration_secs': 0.435759} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1723.980914] env[62510]: DEBUG oslo_concurrency.lockutils [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1723.981326] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Reconfigured VM instance instance-0000004a to attach disk [datastore1] 2dce738b-9624-4a74-8b8c-042e45b693b0/2dce738b-9624-4a74-8b8c-042e45b693b0.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1723.981998] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-64c306e8-614d-4cc8-8f0b-1d0929e16a21 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.990923] env[62510]: DEBUG oslo_vmware.api [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for the task: (returnval){ [ 1723.990923] env[62510]: value = "task-1769030" [ 1723.990923] env[62510]: _type = "Task" [ 1723.990923] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.001246] env[62510]: DEBUG oslo_vmware.api [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769030, 'name': Rename_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.289478] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769020, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.128605} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.289759] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b54fc7f8-0408-41bd-abcd-6c673fa40237/b54fc7f8-0408-41bd-abcd-6c673fa40237.vmdk to [datastore1] 83fa0d32-18ee-401d-af0b-a0adb538e5f4/83fa0d32-18ee-401d-af0b-a0adb538e5f4.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1724.292053] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc581031-5358-479d-bd32-2403e432d647 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.316828] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Reconfiguring VM instance instance-00000027 to attach disk [datastore1] 83fa0d32-18ee-401d-af0b-a0adb538e5f4/83fa0d32-18ee-401d-af0b-a0adb538e5f4.vmdk or device None with type streamOptimized {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1724.320501] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-926ad3cd-35ee-4bbf-9f4a-21fe9e9cc599 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.342171] env[62510]: DEBUG nova.scheduler.client.report [None req-24fee006-af04-4d12-b271-1c2c97912e87 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1724.351763] env[62510]: DEBUG oslo_vmware.api [None req-d2640428-ead9-4962-8344-9522d1848254 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Task: {'id': task-1769028, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.711574} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.355312] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2640428-ead9-4962-8344-9522d1848254 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1724.355548] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d2640428-ead9-4962-8344-9522d1848254 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1724.355813] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d2640428-ead9-4962-8344-9522d1848254 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1724.355924] env[62510]: INFO nova.compute.manager [None req-d2640428-ead9-4962-8344-9522d1848254 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Took 1.76 seconds to destroy the instance on the hypervisor. [ 1724.356176] env[62510]: DEBUG oslo.service.loopingcall [None req-d2640428-ead9-4962-8344-9522d1848254 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1724.356476] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1724.356476] env[62510]: value = "task-1769031" [ 1724.356476] env[62510]: _type = "Task" [ 1724.356476] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.356831] env[62510]: DEBUG nova.compute.manager [-] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1724.356831] env[62510]: DEBUG nova.network.neutron [-] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1724.372135] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769031, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.380378] env[62510]: DEBUG oslo_vmware.api [None req-7cb054d8-944c-43c2-9cac-cc1621370fc5 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Task: {'id': task-1769025, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.106402} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.380695] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cb054d8-944c-43c2-9cac-cc1621370fc5 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1724.380815] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-7cb054d8-944c-43c2-9cac-cc1621370fc5 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1724.380995] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-7cb054d8-944c-43c2-9cac-cc1621370fc5 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1724.381196] env[62510]: INFO nova.compute.manager [None req-7cb054d8-944c-43c2-9cac-cc1621370fc5 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Took 2.83 seconds to destroy the instance on the hypervisor. [ 1724.381425] env[62510]: DEBUG oslo.service.loopingcall [None req-7cb054d8-944c-43c2-9cac-cc1621370fc5 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1724.381620] env[62510]: DEBUG nova.compute.manager [-] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1724.381719] env[62510]: DEBUG nova.network.neutron [-] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1724.502970] env[62510]: DEBUG oslo_vmware.api [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769030, 'name': Rename_Task, 'duration_secs': 0.422941} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.503289] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1724.503543] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7bf5b9e4-9fa2-4f66-9fbd-6fd1b41e28bb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.511314] env[62510]: DEBUG oslo_vmware.api [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for the task: (returnval){ [ 1724.511314] env[62510]: value = "task-1769032" [ 1724.511314] env[62510]: _type = "Task" [ 1724.511314] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.520190] env[62510]: DEBUG oslo_vmware.api [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769032, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.596772] env[62510]: DEBUG nova.compute.manager [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1724.631167] env[62510]: DEBUG nova.virt.hardware [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1724.631434] env[62510]: DEBUG nova.virt.hardware [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1724.631591] env[62510]: DEBUG nova.virt.hardware [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1724.631940] env[62510]: DEBUG nova.virt.hardware [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1724.632544] env[62510]: DEBUG nova.virt.hardware [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1724.632739] env[62510]: DEBUG nova.virt.hardware [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1724.632961] env[62510]: DEBUG nova.virt.hardware [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1724.633142] env[62510]: DEBUG nova.virt.hardware [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1724.633316] env[62510]: DEBUG 
nova.virt.hardware [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1724.633485] env[62510]: DEBUG nova.virt.hardware [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1724.633756] env[62510]: DEBUG nova.virt.hardware [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1724.635056] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fc896b7-5419-4ef2-a45d-3f0efbbbfb32 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.644531] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-107eb16d-b04d-431d-b229-ab8253eb1f4d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.845768] env[62510]: DEBUG nova.compute.manager [req-4c2b010a-177c-4079-b7cd-ba99b478e626 req-e4c58a96-364f-49df-a114-41addee0b28d service nova] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Received event network-vif-deleted-7489ebb6-ec5f-4097-9a62-81a2d3dedd52 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1724.845997] env[62510]: INFO nova.compute.manager [req-4c2b010a-177c-4079-b7cd-ba99b478e626 req-e4c58a96-364f-49df-a114-41addee0b28d service nova] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Neutron deleted interface 7489ebb6-ec5f-4097-9a62-81a2d3dedd52; detaching it from the instance and deleting it from the info cache [ 1724.846165] env[62510]: DEBUG nova.network.neutron [req-4c2b010a-177c-4079-b7cd-ba99b478e626 req-e4c58a96-364f-49df-a114-41addee0b28d service nova] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1724.848275] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24fee006-af04-4d12-b271-1c2c97912e87 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.275s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1724.851493] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 21.027s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1724.876501] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 
tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769031, 'name': ReconfigVM_Task, 'duration_secs': 0.279483} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.879653] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Reconfigured VM instance instance-00000027 to attach disk [datastore1] 83fa0d32-18ee-401d-af0b-a0adb538e5f4/83fa0d32-18ee-401d-af0b-a0adb538e5f4.vmdk or device None with type streamOptimized {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1724.879653] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'encryption_secret_uuid': None, 'encrypted': False, 'device_type': 'disk', 'size': 0, 'disk_bus': None, 'guest_format': None, 'encryption_options': None, 'boot_index': 0, 'device_name': '/dev/sda', 'encryption_format': None, 'image_id': '645af513-c243-4722-b631-714f21477ae6'}], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367365', 'volume_id': 'fc768519-1bbc-47ae-b9f1-9717554b1759', 'name': 'volume-fc768519-1bbc-47ae-b9f1-9717554b1759', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '83fa0d32-18ee-401d-af0b-a0adb538e5f4', 'attached_at': '', 'detached_at': '', 'volume_id': 'fc768519-1bbc-47ae-b9f1-9717554b1759', 'serial': 'fc768519-1bbc-47ae-b9f1-9717554b1759'}, 'attachment_id': '3c589144-ffe8-414f-a650-9a1cbb18c4ef', 'mount_device': '/dev/sdb', 'device_type': None, 'disk_bus': None, 'guest_format': None, 'boot_index': None, 'delete_on_termination': False, 'volume_type': None}], 'swap': None} {{(pid=62510) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1724.879653] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Volume attach. 
Driver type: vmdk {{(pid=62510) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1724.879653] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367365', 'volume_id': 'fc768519-1bbc-47ae-b9f1-9717554b1759', 'name': 'volume-fc768519-1bbc-47ae-b9f1-9717554b1759', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '83fa0d32-18ee-401d-af0b-a0adb538e5f4', 'attached_at': '', 'detached_at': '', 'volume_id': 'fc768519-1bbc-47ae-b9f1-9717554b1759', 'serial': 'fc768519-1bbc-47ae-b9f1-9717554b1759'} {{(pid=62510) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1724.880571] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3348772-c717-4425-8937-409adecfabd6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.909481] env[62510]: INFO nova.scheduler.client.report [None req-24fee006-af04-4d12-b271-1c2c97912e87 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Deleted allocations for instance c829d602-97bc-4ec8-9090-c63bed04ac79 [ 1724.911567] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2662e878-6a88-4713-8fb6-5f1836d4aefb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.945170] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Reconfiguring VM instance instance-00000027 to attach disk [datastore1] volume-fc768519-1bbc-47ae-b9f1-9717554b1759/volume-fc768519-1bbc-47ae-b9f1-9717554b1759.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1724.945523] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-491e934e-340b-4939-9215-432036a3215c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.966283] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1724.966283] env[62510]: value = "task-1769033" [ 1724.966283] env[62510]: _type = "Task" [ 1724.966283] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.979201] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769033, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.024470] env[62510]: DEBUG oslo_vmware.api [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769032, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.311467] env[62510]: DEBUG nova.network.neutron [-] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1725.353343] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-db3e3c53-07bf-4c5c-a8cd-f28ba2a4685f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.373570] env[62510]: DEBUG nova.network.neutron [-] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1725.386105] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0519d8b-59e4-4c4e-9dde-930f28bc6e1b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.436414] env[62510]: DEBUG nova.compute.manager [req-4c2b010a-177c-4079-b7cd-ba99b478e626 req-e4c58a96-364f-49df-a114-41addee0b28d service nova] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Detach interface failed, port_id=7489ebb6-ec5f-4097-9a62-81a2d3dedd52, reason: Instance 12768001-6ed0-47be-8f20-c59ee82b842a could not be found. {{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1725.437298] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24fee006-af04-4d12-b271-1c2c97912e87 tempest-ServerPasswordTestJSON-1829647171 tempest-ServerPasswordTestJSON-1829647171-project-member] Lock "c829d602-97bc-4ec8-9090-c63bed04ac79" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.775s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1725.483617] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769033, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.522721] env[62510]: DEBUG oslo_vmware.api [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769032, 'name': PowerOnVM_Task, 'duration_secs': 0.524841} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1725.523081] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1725.523295] env[62510]: INFO nova.compute.manager [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Took 9.61 seconds to spawn the instance on the hypervisor. [ 1725.523553] env[62510]: DEBUG nova.compute.manager [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1725.524863] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ebb66cc-a331-4a3b-931e-abd047e51296 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.606157] env[62510]: DEBUG nova.compute.manager [req-ac269667-faab-4b6e-a3b7-d9c0c9d668fb req-78655024-38d3-4629-8c7a-8c1b302ee598 service nova] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Received event network-vif-plugged-9174aa7f-56a1-4625-be49-9a7f645e961b {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1725.606388] env[62510]: DEBUG oslo_concurrency.lockutils [req-ac269667-faab-4b6e-a3b7-d9c0c9d668fb req-78655024-38d3-4629-8c7a-8c1b302ee598 service nova] Acquiring lock "77f485ae-9c4c-424e-8bac-6d023e428767-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1725.606650] env[62510]: DEBUG oslo_concurrency.lockutils [req-ac269667-faab-4b6e-a3b7-d9c0c9d668fb req-78655024-38d3-4629-8c7a-8c1b302ee598 service nova] Lock "77f485ae-9c4c-424e-8bac-6d023e428767-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1725.606838] env[62510]: DEBUG oslo_concurrency.lockutils [req-ac269667-faab-4b6e-a3b7-d9c0c9d668fb req-78655024-38d3-4629-8c7a-8c1b302ee598 service nova] Lock "77f485ae-9c4c-424e-8bac-6d023e428767-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1725.609044] env[62510]: DEBUG nova.compute.manager [req-ac269667-faab-4b6e-a3b7-d9c0c9d668fb req-78655024-38d3-4629-8c7a-8c1b302ee598 service nova] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] No waiting events found dispatching network-vif-plugged-9174aa7f-56a1-4625-be49-9a7f645e961b {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1725.609312] env[62510]: WARNING nova.compute.manager [req-ac269667-faab-4b6e-a3b7-d9c0c9d668fb req-78655024-38d3-4629-8c7a-8c1b302ee598 service nova] [instance: 
77f485ae-9c4c-424e-8bac-6d023e428767] Received unexpected event network-vif-plugged-9174aa7f-56a1-4625-be49-9a7f645e961b for instance with vm_state building and task_state spawning. [ 1725.801947] env[62510]: DEBUG nova.network.neutron [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Successfully updated port: 9174aa7f-56a1-4625-be49-9a7f645e961b {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1725.814246] env[62510]: INFO nova.compute.manager [-] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Took 1.46 seconds to deallocate network for instance. [ 1725.886291] env[62510]: INFO nova.compute.manager [-] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Took 1.50 seconds to deallocate network for instance. [ 1725.918733] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 8ffa27e9-6a3b-48d1-aed4-c808089788d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1725.921783] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 4e735bb6-f167-4c2b-b44e-d2dd3040603d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1725.921783] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 12768001-6ed0-47be-8f20-c59ee82b842a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1725.921783] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1725.921783] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 7cc6d4a6-2765-44e7-b378-e213a562593d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1725.921783] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 3533a113-6f46-4b18-872d-9bc1b0481969 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1725.921783] env[62510]: WARNING nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1725.921783] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 568ce58c-9ce5-4b40-988f-f31d8e0c376d actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1725.921783] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance e3850272-9dae-4164-8f0e-f5513af23f49 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1725.921783] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1725.921783] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1725.921783] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 83fa0d32-18ee-401d-af0b-a0adb538e5f4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1725.921783] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 241d842d-3dd5-4ac2-a18a-12b9c9fbd340 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1725.921783] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 841460b0-d917-44ea-88c6-0e5a3022f658 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1725.921783] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 16b5d928-94fe-4fd5-9909-775c28d7edd2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1725.922231] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance c8e69231-2786-47ac-9a44-c194088b8079 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1725.922231] env[62510]: WARNING nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 1d644c4f-1fd4-4251-aeef-5777d3f4b94c is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1725.922231] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance b7ffe11f-2f63-419b-9ad8-0a89a05d201c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1725.922231] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 2dce738b-9624-4a74-8b8c-042e45b693b0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1725.922231] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 77f485ae-9c4c-424e-8bac-6d023e428767 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1725.980555] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769033, 'name': ReconfigVM_Task, 'duration_secs': 0.7126} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1725.980853] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Reconfigured VM instance instance-00000027 to attach disk [datastore1] volume-fc768519-1bbc-47ae-b9f1-9717554b1759/volume-fc768519-1bbc-47ae-b9f1-9717554b1759.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1725.988382] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-32dadeb3-094f-438c-8b12-e94f4e0cc378 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.013096] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1726.013096] env[62510]: value = "task-1769034" [ 1726.013096] env[62510]: _type = "Task" [ 1726.013096] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.024542] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769034, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.046219] env[62510]: INFO nova.compute.manager [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Took 52.98 seconds to build instance. 
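Editor's note: the ReconfigVM_Task / PowerOnVM_Task / CreateVM_Task entries around this point all follow the same shape: a "Waiting for the task" line, repeated "_poll_task ... progress is N%" lines, and finally "completed successfully". The sketch below is only an illustration of that poll-until-done pattern; wait_for_task, fake_poll, TaskError and the polling interval here are hypothetical stand-ins written for this note, not the actual oslo.vmware API.

    import time

    class TaskError(Exception):
        """Raised when the remote task finishes in an error state."""

    def wait_for_task(poll_fn, task_id, interval=0.5):
        """Poll poll_fn(task_id) until the task leaves its running state.

        poll_fn must return a dict such as {'state': 'running', 'progress': 14},
        {'state': 'success'} or {'state': 'error', 'message': '...'}.
        """
        while True:
            status = poll_fn(task_id)
            state = status['state']
            if state in ('queued', 'running'):
                # Mirrors the "_poll_task ... progress is N%" log lines above.
                print("Task %s progress is %s%%." % (task_id, status.get('progress', 0)))
                time.sleep(interval)
                continue
            if state == 'success':
                print("Task %s completed successfully." % task_id)
                return status
            raise TaskError(status.get('message', 'task %s failed' % task_id))

    # Tiny simulated task so the sketch runs on its own.
    _progress = {'task-demo': 0}

    def fake_poll(task_id):
        _progress[task_id] += 50
        if _progress[task_id] >= 100:
            return {'state': 'success'}
        return {'state': 'running', 'progress': _progress[task_id]}

    if __name__ == '__main__':
        wait_for_task(fake_poll, 'task-demo', interval=0.1)

The same loop structure is what produces the interleaved progress lines for concurrently running tasks (e.g. task-1769033 and task-1769032 above), since each waiter polls and logs independently.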
[ 1726.305683] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "refresh_cache-77f485ae-9c4c-424e-8bac-6d023e428767" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1726.305880] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquired lock "refresh_cache-77f485ae-9c4c-424e-8bac-6d023e428767" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1726.306060] env[62510]: DEBUG nova.network.neutron [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1726.323165] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d2640428-ead9-4962-8344-9522d1848254 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1726.393896] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7cb054d8-944c-43c2-9cac-cc1621370fc5 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1726.428397] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 774ea198-c933-449a-8380-2e4cc9327389 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1726.530987] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769034, 'name': ReconfigVM_Task, 'duration_secs': 0.444587} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1726.531644] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367365', 'volume_id': 'fc768519-1bbc-47ae-b9f1-9717554b1759', 'name': 'volume-fc768519-1bbc-47ae-b9f1-9717554b1759', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '83fa0d32-18ee-401d-af0b-a0adb538e5f4', 'attached_at': '', 'detached_at': '', 'volume_id': 'fc768519-1bbc-47ae-b9f1-9717554b1759', 'serial': 'fc768519-1bbc-47ae-b9f1-9717554b1759'} {{(pid=62510) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1726.532032] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b987cbea-6093-4c04-914f-81fa6666c45a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.544085] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1726.544085] env[62510]: value = "task-1769035" [ 1726.544085] env[62510]: _type = "Task" [ 1726.544085] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.555158] env[62510]: DEBUG oslo_concurrency.lockutils [None req-35e01aef-f686-433f-8468-1f5dce7b2794 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Lock "2dce738b-9624-4a74-8b8c-042e45b693b0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.498s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1726.555417] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769035, 'name': Rename_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.857747] env[62510]: DEBUG nova.network.neutron [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Instance cache missing network info. 
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1726.894527] env[62510]: DEBUG nova.compute.manager [req-f4bd99c2-964e-4565-a3ed-2d2ad31c2d0e req-2c28822d-5786-4fe3-abc3-4a9b2ce3516c service nova] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Received event network-vif-deleted-d769f744-6168-49b4-b195-b608fec386e8 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1726.935350] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 144052ab-e3e7-401f-9edb-d8088780e468 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1727.055528] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769035, 'name': Rename_Task, 'duration_secs': 0.436436} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1727.055843] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1727.056112] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d0dc4a38-bc32-4069-aea6-30e55a546740 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.064329] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1727.064329] env[62510]: value = "task-1769036" [ 1727.064329] env[62510]: _type = "Task" [ 1727.064329] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1727.073666] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769036, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.084790] env[62510]: DEBUG nova.network.neutron [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Updating instance_info_cache with network_info: [{"id": "9174aa7f-56a1-4625-be49-9a7f645e961b", "address": "fa:16:3e:c3:e4:7b", "network": {"id": "bf59f5d9-5154-4120-9edd-03529b552382", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2003015829-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e144c0bd2d124193a65ad53de8c43039", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9174aa7f-56", "ovs_interfaceid": "9174aa7f-56a1-4625-be49-9a7f645e961b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1727.441270] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance bc474f8b-dd3b-4d7a-a8e0-fea5570b3091 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1727.442131] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Instance with task_state "unshelving" is not being actively managed by this compute host but has allocations referencing this compute node (c3653102-341b-4ed1-8b1f-1abaf8aa3e56): {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocations during the task state transition. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1780}} [ 1727.578028] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769036, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.592078] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Releasing lock "refresh_cache-77f485ae-9c4c-424e-8bac-6d023e428767" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1727.592078] env[62510]: DEBUG nova.compute.manager [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Instance network_info: |[{"id": "9174aa7f-56a1-4625-be49-9a7f645e961b", "address": "fa:16:3e:c3:e4:7b", "network": {"id": "bf59f5d9-5154-4120-9edd-03529b552382", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2003015829-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e144c0bd2d124193a65ad53de8c43039", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9174aa7f-56", "ovs_interfaceid": "9174aa7f-56a1-4625-be49-9a7f645e961b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1727.592078] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c3:e4:7b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '87bbf4e0-9064-4516-b7e7-44973f817205', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9174aa7f-56a1-4625-be49-9a7f645e961b', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1727.601369] env[62510]: DEBUG oslo.service.loopingcall [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1727.602378] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1727.602378] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-420d5bbd-587c-435f-ba1a-5385bf793438 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.629022] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1727.629022] env[62510]: value = "task-1769037" [ 1727.629022] env[62510]: _type = "Task" [ 1727.629022] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1727.638450] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769037, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.781723] env[62510]: DEBUG nova.compute.manager [req-c8b91dbd-9d20-49bf-896a-f6eecb266634 req-cf16130e-1f16-4d6c-8499-2d182e7325a8 service nova] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Received event network-changed-9174aa7f-56a1-4625-be49-9a7f645e961b {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1727.782965] env[62510]: DEBUG nova.compute.manager [req-c8b91dbd-9d20-49bf-896a-f6eecb266634 req-cf16130e-1f16-4d6c-8499-2d182e7325a8 service nova] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Refreshing instance network info cache due to event network-changed-9174aa7f-56a1-4625-be49-9a7f645e961b. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1727.783493] env[62510]: DEBUG oslo_concurrency.lockutils [req-c8b91dbd-9d20-49bf-896a-f6eecb266634 req-cf16130e-1f16-4d6c-8499-2d182e7325a8 service nova] Acquiring lock "refresh_cache-77f485ae-9c4c-424e-8bac-6d023e428767" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1727.783493] env[62510]: DEBUG oslo_concurrency.lockutils [req-c8b91dbd-9d20-49bf-896a-f6eecb266634 req-cf16130e-1f16-4d6c-8499-2d182e7325a8 service nova] Acquired lock "refresh_cache-77f485ae-9c4c-424e-8bac-6d023e428767" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1727.783596] env[62510]: DEBUG nova.network.neutron [req-c8b91dbd-9d20-49bf-896a-f6eecb266634 req-cf16130e-1f16-4d6c-8499-2d182e7325a8 service nova] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Refreshing network info cache for port 9174aa7f-56a1-4625-be49-9a7f645e961b {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1727.949099] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 8b079310-084b-4ba0-8a82-57d64f421c11 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1727.949559] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Total usable vcpus: 48, total allocated vcpus: 18 {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1727.949637] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4032MB phys_disk=200GB used_disk=18GB total_vcpus=48 used_vcpus=18 pci_stats=[] {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1728.079767] env[62510]: DEBUG oslo_vmware.api [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769036, 'name': PowerOnVM_Task, 'duration_secs': 0.704519} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1728.080342] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1728.142143] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769037, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.240111] env[62510]: DEBUG nova.compute.manager [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1728.241018] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8231e22f-7ad8-4d4b-965f-ccfabf675d4e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.397813] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-673de337-94b8-4fab-ab3f-077332906b08 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.410914] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1680a03a-a314-45d3-a290-cb4f4ac4b9a3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.445482] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dc1b36d-d9c5-4c69-aedb-9f9e5bb6549c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.454627] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31cf2fa6-3015-4de5-858a-17508a8611f1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.470097] env[62510]: DEBUG nova.compute.provider_tree [None 
req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1728.589172] env[62510]: DEBUG nova.network.neutron [req-c8b91dbd-9d20-49bf-896a-f6eecb266634 req-cf16130e-1f16-4d6c-8499-2d182e7325a8 service nova] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Updated VIF entry in instance network info cache for port 9174aa7f-56a1-4625-be49-9a7f645e961b. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1728.589688] env[62510]: DEBUG nova.network.neutron [req-c8b91dbd-9d20-49bf-896a-f6eecb266634 req-cf16130e-1f16-4d6c-8499-2d182e7325a8 service nova] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Updating instance_info_cache with network_info: [{"id": "9174aa7f-56a1-4625-be49-9a7f645e961b", "address": "fa:16:3e:c3:e4:7b", "network": {"id": "bf59f5d9-5154-4120-9edd-03529b552382", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2003015829-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e144c0bd2d124193a65ad53de8c43039", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9174aa7f-56", "ovs_interfaceid": "9174aa7f-56a1-4625-be49-9a7f645e961b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1728.640488] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769037, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.767467] env[62510]: DEBUG oslo_concurrency.lockutils [None req-876f32a2-2f1b-4435-bdfc-ac731983d036 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lock "83fa0d32-18ee-401d-af0b-a0adb538e5f4" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 75.755s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1728.976021] env[62510]: DEBUG nova.scheduler.client.report [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1729.093379] env[62510]: DEBUG oslo_concurrency.lockutils [req-c8b91dbd-9d20-49bf-896a-f6eecb266634 req-cf16130e-1f16-4d6c-8499-2d182e7325a8 service nova] Releasing lock "refresh_cache-77f485ae-9c4c-424e-8bac-6d023e428767" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1729.145207] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769037, 'name': CreateVM_Task, 'duration_secs': 1.458813} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1729.145460] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1729.146524] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1729.146920] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1729.147331] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1729.147712] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85bc32d0-8916-4ada-98fb-65b428e40201 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1729.153031] env[62510]: DEBUG oslo_vmware.api [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1729.153031] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52f44f0a-6143-d6d0-0dff-0df18b96b46e" [ 1729.153031] env[62510]: _type = "Task" [ 1729.153031] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.167311] env[62510]: DEBUG oslo_vmware.api [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52f44f0a-6143-d6d0-0dff-0df18b96b46e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.479812] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62510) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1729.479812] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.628s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1729.479812] env[62510]: DEBUG oslo_concurrency.lockutils [None req-318b95aa-5cee-4e0a-9fb4-d70b6d6dce35 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.734s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1729.479812] env[62510]: DEBUG oslo_concurrency.lockutils [None req-318b95aa-5cee-4e0a-9fb4-d70b6d6dce35 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1729.481970] env[62510]: DEBUG oslo_concurrency.lockutils [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.574s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1729.483954] env[62510]: INFO nova.compute.claims [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1729.504222] env[62510]: INFO nova.scheduler.client.report [None req-318b95aa-5cee-4e0a-9fb4-d70b6d6dce35 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Deleted allocations for instance ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3 [ 1729.664025] env[62510]: DEBUG oslo_vmware.api 
[None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52f44f0a-6143-d6d0-0dff-0df18b96b46e, 'name': SearchDatastore_Task, 'duration_secs': 0.01263} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1729.664303] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1729.664553] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1729.664789] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1729.664935] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1729.665125] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1729.665461] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5c7db304-0d06-4a1a-bc9c-eb59d3730b38 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.669565] env[62510]: DEBUG nova.compute.manager [None req-8dfdc59a-ca0c-4ba3-a885-d108ba58bab2 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1729.670332] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8998c1f-6732-49e0-9a90-0395fd2a7e90 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.674553] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 
tempest-ServerDiskConfigTestJSON-921990528-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1729.674782] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1729.676721] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0735ab72-1ad0-43a5-914b-135869eb3b4e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.685031] env[62510]: DEBUG oslo_vmware.api [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1729.685031] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5201cafd-eb7d-0f10-3cd7-3727adc212e7" [ 1729.685031] env[62510]: _type = "Task" [ 1729.685031] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.691826] env[62510]: DEBUG oslo_vmware.api [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5201cafd-eb7d-0f10-3cd7-3727adc212e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.789845] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Acquiring lock "91a76cc7-7f82-42cf-a379-fc0ba3d04568" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1729.790109] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Lock "91a76cc7-7f82-42cf-a379-fc0ba3d04568" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1730.013576] env[62510]: DEBUG oslo_concurrency.lockutils [None req-318b95aa-5cee-4e0a-9fb4-d70b6d6dce35 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.126s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1730.182793] env[62510]: INFO nova.compute.manager [None req-8dfdc59a-ca0c-4ba3-a885-d108ba58bab2 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] instance snapshotting [ 1730.189641] env[62510]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a5883fc-8ece-4110-a8fe-7c7d207d0f59 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.198755] env[62510]: DEBUG oslo_vmware.api [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5201cafd-eb7d-0f10-3cd7-3727adc212e7, 'name': SearchDatastore_Task, 'duration_secs': 0.009404} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1730.213673] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6cdefdd-2f37-4af0-a400-12311f0f5983 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.216526] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7825067f-e4b4-4973-bebc-adb0094bb52d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.222897] env[62510]: DEBUG oslo_vmware.api [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1730.222897] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5293fa54-5db6-1ea8-7c6f-dc77ac92375e" [ 1730.222897] env[62510]: _type = "Task" [ 1730.222897] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1730.236819] env[62510]: DEBUG oslo_vmware.api [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5293fa54-5db6-1ea8-7c6f-dc77ac92375e, 'name': SearchDatastore_Task, 'duration_secs': 0.011477} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1730.237104] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1730.237366] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 77f485ae-9c4c-424e-8bac-6d023e428767/77f485ae-9c4c-424e-8bac-6d023e428767.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1730.237626] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9c559558-1e27-4093-8a61-2bd063c7b521 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.245575] env[62510]: DEBUG oslo_vmware.api [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1730.245575] env[62510]: value = "task-1769038" [ 1730.245575] env[62510]: _type = "Task" [ 1730.245575] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1730.254536] env[62510]: DEBUG oslo_vmware.api [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769038, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.295023] env[62510]: DEBUG nova.compute.manager [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1730.731864] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8dfdc59a-ca0c-4ba3-a885-d108ba58bab2 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Creating Snapshot of the VM instance {{(pid=62510) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1730.732188] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-2db89119-0243-4d8d-8f76-e87f3275eab9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.742428] env[62510]: DEBUG oslo_vmware.api [None req-8dfdc59a-ca0c-4ba3-a885-d108ba58bab2 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for the task: (returnval){ [ 1730.742428] env[62510]: value = "task-1769039" [ 1730.742428] env[62510]: _type = "Task" [ 1730.742428] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1730.756458] env[62510]: DEBUG oslo_vmware.api [None req-8dfdc59a-ca0c-4ba3-a885-d108ba58bab2 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769039, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.762739] env[62510]: DEBUG oslo_vmware.api [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769038, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.465557} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1730.762879] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 77f485ae-9c4c-424e-8bac-6d023e428767/77f485ae-9c4c-424e-8bac-6d023e428767.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1730.763117] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1730.763392] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f1f758d5-69d2-43e7-a29f-dde6de0284b6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.770251] env[62510]: DEBUG oslo_vmware.api [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1730.770251] env[62510]: value = "task-1769040" [ 1730.770251] env[62510]: _type = "Task" [ 1730.770251] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1730.778620] env[62510]: DEBUG oslo_vmware.api [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769040, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.818362] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1730.850995] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af6bef2a-d2fd-436d-913f-46fc3846c7ab {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.859904] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb4a0c1f-8a9e-4806-a111-58f6280be617 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.892463] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33cad9e2-53be-4992-8076-e403d5a790d3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.900828] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a42a507c-cb95-4df7-980e-325d4e9a1b06 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.914833] env[62510]: DEBUG nova.compute.provider_tree [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1731.070392] env[62510]: DEBUG oslo_concurrency.lockutils [None req-fce85173-013c-4180-aa3c-60eca3b7ad6a tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquiring lock "7cc6d4a6-2765-44e7-b378-e213a562593d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1731.070790] env[62510]: DEBUG oslo_concurrency.lockutils [None req-fce85173-013c-4180-aa3c-60eca3b7ad6a tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "7cc6d4a6-2765-44e7-b378-e213a562593d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1731.071108] env[62510]: DEBUG oslo_concurrency.lockutils [None req-fce85173-013c-4180-aa3c-60eca3b7ad6a tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquiring lock "7cc6d4a6-2765-44e7-b378-e213a562593d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1731.071416] env[62510]: DEBUG oslo_concurrency.lockutils [None req-fce85173-013c-4180-aa3c-60eca3b7ad6a tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] 
Lock "7cc6d4a6-2765-44e7-b378-e213a562593d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1731.071704] env[62510]: DEBUG oslo_concurrency.lockutils [None req-fce85173-013c-4180-aa3c-60eca3b7ad6a tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "7cc6d4a6-2765-44e7-b378-e213a562593d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1731.074213] env[62510]: INFO nova.compute.manager [None req-fce85173-013c-4180-aa3c-60eca3b7ad6a tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Terminating instance [ 1731.253161] env[62510]: DEBUG oslo_vmware.api [None req-8dfdc59a-ca0c-4ba3-a885-d108ba58bab2 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769039, 'name': CreateSnapshot_Task, 'duration_secs': 0.490493} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1731.253424] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8dfdc59a-ca0c-4ba3-a885-d108ba58bab2 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Created Snapshot of the VM instance {{(pid=62510) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1731.254174] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a191150-fdcb-4fd6-bd7c-a2287d0d01a1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.279801] env[62510]: DEBUG oslo_vmware.api [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769040, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065191} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1731.280076] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1731.280848] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42e1f878-8fad-48dc-92e0-1432f6d0a552 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.303144] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] 77f485ae-9c4c-424e-8bac-6d023e428767/77f485ae-9c4c-424e-8bac-6d023e428767.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1731.304119] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a4e941bb-df42-4fec-939a-354f3cfc4daf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.324584] env[62510]: DEBUG oslo_vmware.api [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1731.324584] env[62510]: value = "task-1769041" [ 1731.324584] env[62510]: _type = "Task" [ 1731.324584] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1731.335056] env[62510]: DEBUG oslo_vmware.api [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769041, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.418442] env[62510]: DEBUG nova.scheduler.client.report [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1731.578364] env[62510]: DEBUG nova.compute.manager [None req-fce85173-013c-4180-aa3c-60eca3b7ad6a tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1731.578627] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-fce85173-013c-4180-aa3c-60eca3b7ad6a tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1731.579620] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6506c650-0550-48e6-b2a3-be3be6970439 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.588433] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-fce85173-013c-4180-aa3c-60eca3b7ad6a tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1731.588719] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e1e28209-c841-46f6-8266-e4cbcf2ef7a5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.596809] env[62510]: DEBUG oslo_vmware.api [None req-fce85173-013c-4180-aa3c-60eca3b7ad6a tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1731.596809] env[62510]: value = "task-1769042" [ 1731.596809] env[62510]: _type = "Task" [ 1731.596809] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1731.607925] env[62510]: DEBUG oslo_vmware.api [None req-fce85173-013c-4180-aa3c-60eca3b7ad6a tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1769042, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.772891] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8dfdc59a-ca0c-4ba3-a885-d108ba58bab2 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Creating linked-clone VM from snapshot {{(pid=62510) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1731.773229] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-fea51db3-5844-4523-b9d0-00d39b43592e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.783181] env[62510]: DEBUG oslo_vmware.api [None req-8dfdc59a-ca0c-4ba3-a885-d108ba58bab2 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for the task: (returnval){ [ 1731.783181] env[62510]: value = "task-1769043" [ 1731.783181] env[62510]: _type = "Task" [ 1731.783181] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1731.792371] env[62510]: DEBUG oslo_vmware.api [None req-8dfdc59a-ca0c-4ba3-a885-d108ba58bab2 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769043, 'name': CloneVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.834768] env[62510]: DEBUG oslo_vmware.api [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769041, 'name': ReconfigVM_Task, 'duration_secs': 0.326187} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1731.834768] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Reconfigured VM instance instance-0000004b to attach disk [datastore1] 77f485ae-9c4c-424e-8bac-6d023e428767/77f485ae-9c4c-424e-8bac-6d023e428767.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1731.835295] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2e9d76d4-005d-4296-b758-8c2d4cbacb3e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.845805] env[62510]: DEBUG oslo_vmware.api [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1731.845805] env[62510]: value = "task-1769044" [ 1731.845805] env[62510]: _type = "Task" [ 1731.845805] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1731.867151] env[62510]: DEBUG oslo_vmware.api [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769044, 'name': Rename_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.923447] env[62510]: DEBUG oslo_concurrency.lockutils [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.441s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1731.923997] env[62510]: DEBUG nova.compute.manager [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1731.926858] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.289s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1731.928407] env[62510]: INFO nova.compute.claims [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1732.107697] env[62510]: DEBUG oslo_vmware.api [None req-fce85173-013c-4180-aa3c-60eca3b7ad6a tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1769042, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.295505] env[62510]: DEBUG oslo_vmware.api [None req-8dfdc59a-ca0c-4ba3-a885-d108ba58bab2 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769043, 'name': CloneVM_Task} progress is 94%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.355464] env[62510]: DEBUG oslo_vmware.api [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769044, 'name': Rename_Task, 'duration_secs': 0.207316} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1732.355761] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1732.356011] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-46c20ef1-d398-47f2-b252-1436ee8f2c89 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.363729] env[62510]: DEBUG oslo_vmware.api [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1732.363729] env[62510]: value = "task-1769045" [ 1732.363729] env[62510]: _type = "Task" [ 1732.363729] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1732.371794] env[62510]: DEBUG oslo_vmware.api [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769045, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.432527] env[62510]: DEBUG nova.compute.utils [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1732.436442] env[62510]: DEBUG nova.compute.manager [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1732.436442] env[62510]: DEBUG nova.network.neutron [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1732.474013] env[62510]: DEBUG nova.policy [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dbc6eced57ea45fdafc3635a58fb3611', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f878b652f01c48139bfc6996e5e32f5b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1732.607972] env[62510]: DEBUG oslo_vmware.api [None req-fce85173-013c-4180-aa3c-60eca3b7ad6a tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1769042, 'name': PowerOffVM_Task, 'duration_secs': 0.608801} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1732.608276] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-fce85173-013c-4180-aa3c-60eca3b7ad6a tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1732.608444] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-fce85173-013c-4180-aa3c-60eca3b7ad6a tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1732.608690] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-663b0c48-b8c1-4c3c-bb46-1839466c4ddf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.692361] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-fce85173-013c-4180-aa3c-60eca3b7ad6a tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1732.692570] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-fce85173-013c-4180-aa3c-60eca3b7ad6a tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1732.692756] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-fce85173-013c-4180-aa3c-60eca3b7ad6a tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Deleting the datastore file [datastore1] 7cc6d4a6-2765-44e7-b378-e213a562593d {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1732.693040] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6aac7b70-4eea-4ec7-a16e-b4a0f71812e9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.703582] env[62510]: DEBUG oslo_vmware.api [None req-fce85173-013c-4180-aa3c-60eca3b7ad6a tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1732.703582] env[62510]: value = "task-1769047" [ 1732.703582] env[62510]: _type = "Task" [ 1732.703582] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1732.713178] env[62510]: DEBUG oslo_vmware.api [None req-fce85173-013c-4180-aa3c-60eca3b7ad6a tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1769047, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.726289] env[62510]: DEBUG nova.network.neutron [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Successfully created port: 94db8ca3-7767-44f8-8cb6-bc0a7b18b7ec {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1732.795838] env[62510]: DEBUG oslo_vmware.api [None req-8dfdc59a-ca0c-4ba3-a885-d108ba58bab2 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769043, 'name': CloneVM_Task} progress is 94%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.880021] env[62510]: DEBUG oslo_vmware.api [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769045, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.937886] env[62510]: DEBUG nova.compute.manager [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1733.218982] env[62510]: DEBUG oslo_vmware.api [None req-fce85173-013c-4180-aa3c-60eca3b7ad6a tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1769047, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.192636} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1733.219796] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-fce85173-013c-4180-aa3c-60eca3b7ad6a tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1733.219796] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-fce85173-013c-4180-aa3c-60eca3b7ad6a tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1733.219796] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-fce85173-013c-4180-aa3c-60eca3b7ad6a tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1733.219796] env[62510]: INFO nova.compute.manager [None req-fce85173-013c-4180-aa3c-60eca3b7ad6a tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Took 1.64 seconds to destroy the instance on the hypervisor. 
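The records above repeat the same motif for every vCenter operation in this capture (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task, PowerOffVM_Task, DeleteDatastoreFile_Task, CreateSnapshot_Task, CloneVM_Task): a "Waiting for the task: (returnval){ ... } to complete" entry, zero or more "progress is N%" polls, and a "completed successfully" entry carrying a duration_secs value. For orientation only, a minimal sketch of such a polling loop is shown below; it is not the oslo.vmware implementation, and fetch_task_info / poll_interval are hypothetical names introduced purely for illustration.

    import time

    def wait_for_task(fetch_task_info, poll_interval=0.5, timeout=300.0):
        # fetch_task_info is a hypothetical callable returning an object with
        # .state ("queued" | "running" | "success" | "error"), .progress and
        # .error attributes, mirroring what the records above report.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = fetch_task_info()
            if info.state == "success":        # logged as "completed successfully"
                return info
            if info.state == "error":          # the task failed on the vCenter side
                raise RuntimeError(info.error)
            print(f"progress is {info.progress}%")   # the "progress is N%" polls
            time.sleep(poll_interval)
        raise TimeoutError(f"task did not complete within {timeout:.0f}s")
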
[ 1733.220089] env[62510]: DEBUG oslo.service.loopingcall [None req-fce85173-013c-4180-aa3c-60eca3b7ad6a tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1733.220242] env[62510]: DEBUG nova.compute.manager [-] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1733.220387] env[62510]: DEBUG nova.network.neutron [-] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1733.303195] env[62510]: DEBUG oslo_vmware.api [None req-8dfdc59a-ca0c-4ba3-a885-d108ba58bab2 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769043, 'name': CloneVM_Task} progress is 95%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.303195] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e50d28cb-2e18-4ff5-97c9-d8f7be93dbcf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.312329] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e8d74b0-06b2-4198-bd92-04116aec0048 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.358935] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c544a9c8-7726-464a-b2fa-eee516e44b74 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.371943] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c69b26f-97ce-4e07-a64a-f0d184cfbcef {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.384518] env[62510]: DEBUG oslo_vmware.api [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769045, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.393513] env[62510]: DEBUG nova.compute.provider_tree [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1733.698444] env[62510]: DEBUG nova.compute.manager [req-fc23f3e3-fee6-4b8b-83da-662b12a44bab req-75e0b821-8f89-4ca1-a78b-c24126364b72 service nova] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Received event network-vif-deleted-13e613d5-690a-4054-8ca3-e768cb4625ea {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1733.698657] env[62510]: INFO nova.compute.manager [req-fc23f3e3-fee6-4b8b-83da-662b12a44bab req-75e0b821-8f89-4ca1-a78b-c24126364b72 service nova] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Neutron deleted interface 13e613d5-690a-4054-8ca3-e768cb4625ea; detaching it from the instance and deleting it from the info cache [ 1733.698842] env[62510]: DEBUG nova.network.neutron [req-fc23f3e3-fee6-4b8b-83da-662b12a44bab req-75e0b821-8f89-4ca1-a78b-c24126364b72 service nova] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1733.795057] env[62510]: DEBUG oslo_vmware.api [None req-8dfdc59a-ca0c-4ba3-a885-d108ba58bab2 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769043, 'name': CloneVM_Task, 'duration_secs': 1.679791} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1733.795439] env[62510]: INFO nova.virt.vmwareapi.vmops [None req-8dfdc59a-ca0c-4ba3-a885-d108ba58bab2 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Created linked-clone VM from snapshot [ 1733.796033] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4adaead5-2262-4469-9b02-7d3318aa8811 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.803955] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-8dfdc59a-ca0c-4ba3-a885-d108ba58bab2 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Uploading image 36fa0254-cd09-473e-ae37-fbce932e21dd {{(pid=62510) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1733.817768] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dfdc59a-ca0c-4ba3-a885-d108ba58bab2 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Destroying the VM {{(pid=62510) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1733.818059] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-dfd6ebdc-4da9-4d38-9311-05938891ebdf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.826232] env[62510]: DEBUG oslo_vmware.api [None req-8dfdc59a-ca0c-4ba3-a885-d108ba58bab2 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for the task: (returnval){ [ 1733.826232] env[62510]: value = "task-1769048" [ 1733.826232] env[62510]: _type = "Task" [ 1733.826232] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1733.835109] env[62510]: DEBUG oslo_vmware.api [None req-8dfdc59a-ca0c-4ba3-a885-d108ba58bab2 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769048, 'name': Destroy_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.878728] env[62510]: DEBUG oslo_vmware.api [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769045, 'name': PowerOnVM_Task, 'duration_secs': 1.230245} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1733.879126] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1733.879413] env[62510]: INFO nova.compute.manager [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Took 9.28 seconds to spawn the instance on the hypervisor. [ 1733.879668] env[62510]: DEBUG nova.compute.manager [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1733.880631] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dec7b27d-ead1-4aca-b286-0e632dbddeb5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.896999] env[62510]: DEBUG nova.scheduler.client.report [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1733.949578] env[62510]: DEBUG nova.compute.manager [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1733.975304] env[62510]: DEBUG nova.virt.hardware [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1733.975611] env[62510]: DEBUG nova.virt.hardware [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1733.975696] env[62510]: DEBUG nova.virt.hardware [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1733.976806] env[62510]: DEBUG nova.virt.hardware [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1733.976806] env[62510]: DEBUG nova.virt.hardware [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1733.976806] env[62510]: DEBUG nova.virt.hardware [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1733.976806] env[62510]: DEBUG nova.virt.hardware [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1733.976806] env[62510]: DEBUG nova.virt.hardware [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1733.976806] env[62510]: DEBUG nova.virt.hardware [None 
req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1733.977251] env[62510]: DEBUG nova.virt.hardware [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1733.977251] env[62510]: DEBUG nova.virt.hardware [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1733.978165] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6256413-ca83-468c-805f-6a23901dacbf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.986957] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d50b1905-c4eb-46ee-ae66-276d8b996117 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.080855] env[62510]: DEBUG nova.network.neutron [-] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1734.156821] env[62510]: DEBUG nova.compute.manager [req-319e549c-18ae-42ad-8123-e79d08a62ecf req-a0f532e3-2450-4a82-9bd8-cb5fac637a10 service nova] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Received event network-vif-plugged-94db8ca3-7767-44f8-8cb6-bc0a7b18b7ec {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1734.157104] env[62510]: DEBUG oslo_concurrency.lockutils [req-319e549c-18ae-42ad-8123-e79d08a62ecf req-a0f532e3-2450-4a82-9bd8-cb5fac637a10 service nova] Acquiring lock "774ea198-c933-449a-8380-2e4cc9327389-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1734.157342] env[62510]: DEBUG oslo_concurrency.lockutils [req-319e549c-18ae-42ad-8123-e79d08a62ecf req-a0f532e3-2450-4a82-9bd8-cb5fac637a10 service nova] Lock "774ea198-c933-449a-8380-2e4cc9327389-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1734.157508] env[62510]: DEBUG oslo_concurrency.lockutils [req-319e549c-18ae-42ad-8123-e79d08a62ecf req-a0f532e3-2450-4a82-9bd8-cb5fac637a10 service nova] Lock "774ea198-c933-449a-8380-2e4cc9327389-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1734.157680] env[62510]: DEBUG nova.compute.manager [req-319e549c-18ae-42ad-8123-e79d08a62ecf req-a0f532e3-2450-4a82-9bd8-cb5fac637a10 service nova] [instance: 774ea198-c933-449a-8380-2e4cc9327389] No waiting 
events found dispatching network-vif-plugged-94db8ca3-7767-44f8-8cb6-bc0a7b18b7ec {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1734.157851] env[62510]: WARNING nova.compute.manager [req-319e549c-18ae-42ad-8123-e79d08a62ecf req-a0f532e3-2450-4a82-9bd8-cb5fac637a10 service nova] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Received unexpected event network-vif-plugged-94db8ca3-7767-44f8-8cb6-bc0a7b18b7ec for instance with vm_state building and task_state spawning. [ 1734.201118] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8af3dabe-c3dd-422f-9141-f0ce80dfa884 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.212846] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13e9d3b7-f9d7-4bb2-9ed7-b70ca2a091e4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.256345] env[62510]: DEBUG nova.compute.manager [req-fc23f3e3-fee6-4b8b-83da-662b12a44bab req-75e0b821-8f89-4ca1-a78b-c24126364b72 service nova] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Detach interface failed, port_id=13e613d5-690a-4054-8ca3-e768cb4625ea, reason: Instance 7cc6d4a6-2765-44e7-b378-e213a562593d could not be found. {{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1734.257580] env[62510]: DEBUG nova.network.neutron [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Successfully updated port: 94db8ca3-7767-44f8-8cb6-bc0a7b18b7ec {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1734.336943] env[62510]: DEBUG oslo_vmware.api [None req-8dfdc59a-ca0c-4ba3-a885-d108ba58bab2 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769048, 'name': Destroy_Task} progress is 33%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.398420] env[62510]: INFO nova.compute.manager [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Took 43.49 seconds to build instance. [ 1734.402900] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.476s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1734.403459] env[62510]: DEBUG nova.compute.manager [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1734.406036] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9af1c1a2-1ecc-440c-b170-c5cfb6a171e0 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.255s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1734.406243] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9af1c1a2-1ecc-440c-b170-c5cfb6a171e0 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1734.408590] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.133s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1734.408960] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1734.411095] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.784s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1734.412539] env[62510]: INFO nova.compute.claims [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1734.446444] env[62510]: INFO nova.scheduler.client.report [None req-9af1c1a2-1ecc-440c-b170-c5cfb6a171e0 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Deleted allocations for instance 1d644c4f-1fd4-4251-aeef-5777d3f4b94c [ 1734.583531] env[62510]: INFO nova.compute.manager [-] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Took 1.36 seconds to deallocate network for instance. 
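Note on the lock messages above: the "Acquiring lock \"compute_resources\" … / acquired … waited 19.255s / released … held 0.000s" lines are emitted by the oslo.concurrency lock wrapper that the call sites in the messages point at (lockutils.py:402/407/421). Purely as an illustrative sketch, assuming only the public lockutils.synchronized decorator and with the claim body and values invented for demonstration (this is not Nova's ResourceTracker), the same acquire/wait/hold pattern around a named in-process lock looks like this; with DEBUG logging enabled the wrapper should emit comparable acquiring/acquired/released messages.

import time

from oslo_concurrency import lockutils


@lockutils.synchronized("compute_resources")
def instance_claim(instance_uuid, vcpus, memory_mb):
    # Critical section guarded by the named lock. A real resource tracker would
    # test and update host usage here; this body only simulates a short piece of
    # work so that the waited/held timings in the wrapper's log become visible.
    time.sleep(0.01)
    return {"instance": instance_uuid, "vcpus": vcpus, "memory_mb": memory_mb}


if __name__ == "__main__":
    # Hypothetical instance UUID and flavor values, chosen only for illustration.
    print(instance_claim("bc474f8b-dd3b-4d7a-a8e0-fea5570b3091", 1, 192))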
[ 1734.760990] env[62510]: DEBUG oslo_concurrency.lockutils [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "refresh_cache-774ea198-c933-449a-8380-2e4cc9327389" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1734.761231] env[62510]: DEBUG oslo_concurrency.lockutils [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquired lock "refresh_cache-774ea198-c933-449a-8380-2e4cc9327389" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1734.761400] env[62510]: DEBUG nova.network.neutron [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1734.838689] env[62510]: DEBUG oslo_vmware.api [None req-8dfdc59a-ca0c-4ba3-a885-d108ba58bab2 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769048, 'name': Destroy_Task, 'duration_secs': 0.772064} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1734.839141] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-8dfdc59a-ca0c-4ba3-a885-d108ba58bab2 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Destroyed the VM [ 1734.839237] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8dfdc59a-ca0c-4ba3-a885-d108ba58bab2 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Deleting Snapshot of the VM instance {{(pid=62510) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1734.839429] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-5f9e4009-2a33-4112-9b6c-951c0d838426 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.846186] env[62510]: DEBUG oslo_vmware.api [None req-8dfdc59a-ca0c-4ba3-a885-d108ba58bab2 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for the task: (returnval){ [ 1734.846186] env[62510]: value = "task-1769049" [ 1734.846186] env[62510]: _type = "Task" [ 1734.846186] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.854575] env[62510]: DEBUG oslo_vmware.api [None req-8dfdc59a-ca0c-4ba3-a885-d108ba58bab2 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769049, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.900784] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9e237b42-5b12-4810-81bf-7d8b42a88464 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "77f485ae-9c4c-424e-8bac-6d023e428767" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1734.919596] env[62510]: DEBUG nova.compute.utils [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1734.923450] env[62510]: DEBUG nova.compute.manager [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1734.923681] env[62510]: DEBUG nova.network.neutron [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1734.925643] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ce6563ac-0a0a-495f-91bc-a6d4721b1092 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Lock "e7daad63-c802-4a86-bead-7e849064ed61" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 38.562s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1734.926570] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Lock "e7daad63-c802-4a86-bead-7e849064ed61" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 14.079s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1734.926826] env[62510]: INFO nova.compute.manager [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Unshelving [ 1734.954019] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9af1c1a2-1ecc-440c-b170-c5cfb6a171e0 tempest-ServerGroupTestJSON-669527244 tempest-ServerGroupTestJSON-669527244-project-member] Lock "1d644c4f-1fd4-4251-aeef-5777d3f4b94c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.626s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1734.968916] env[62510]: DEBUG nova.policy [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e0b465ab9caf4d989219f1fbbebd00ce', 
'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd98518565b744451ba90ba301267213f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1735.090424] env[62510]: DEBUG oslo_concurrency.lockutils [None req-fce85173-013c-4180-aa3c-60eca3b7ad6a tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1735.221829] env[62510]: DEBUG nova.network.neutron [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Successfully created port: ff1d0698-2d5b-4d43-b576-87f41d637e5a {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1735.302814] env[62510]: DEBUG nova.network.neutron [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1735.360931] env[62510]: DEBUG oslo_vmware.api [None req-8dfdc59a-ca0c-4ba3-a885-d108ba58bab2 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769049, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.424090] env[62510]: DEBUG nova.compute.manager [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Start building block device mappings for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1735.548807] env[62510]: DEBUG nova.network.neutron [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Updating instance_info_cache with network_info: [{"id": "94db8ca3-7767-44f8-8cb6-bc0a7b18b7ec", "address": "fa:16:3e:72:65:85", "network": {"id": "de9186ec-ac4f-4ac0-8499-037f92e28197", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-164983974-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f878b652f01c48139bfc6996e5e32f5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "191a5351-07d5-4138-b855-206f48fc4375", "external-id": "nsx-vlan-transportzone-939", "segmentation_id": 939, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94db8ca3-77", "ovs_interfaceid": "94db8ca3-7767-44f8-8cb6-bc0a7b18b7ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1735.785143] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f55b7142-66fd-4f46-8a80-5b35b666aa80 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.796209] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8b5fb55-1470-4e40-a637-654686cf7751 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.831736] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7881e3d6-5967-4978-ad24-9da5d07cb37d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.840720] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73b763f8-043d-4153-bf6d-809eb901aec6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.859299] env[62510]: DEBUG nova.compute.provider_tree [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1735.868138] env[62510]: DEBUG oslo_vmware.api [None req-8dfdc59a-ca0c-4ba3-a885-d108ba58bab2 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769049, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.955492] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1736.056342] env[62510]: DEBUG oslo_concurrency.lockutils [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Releasing lock "refresh_cache-774ea198-c933-449a-8380-2e4cc9327389" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1736.056342] env[62510]: DEBUG nova.compute.manager [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Instance network_info: |[{"id": "94db8ca3-7767-44f8-8cb6-bc0a7b18b7ec", "address": "fa:16:3e:72:65:85", "network": {"id": "de9186ec-ac4f-4ac0-8499-037f92e28197", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-164983974-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f878b652f01c48139bfc6996e5e32f5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "191a5351-07d5-4138-b855-206f48fc4375", "external-id": "nsx-vlan-transportzone-939", "segmentation_id": 939, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94db8ca3-77", "ovs_interfaceid": "94db8ca3-7767-44f8-8cb6-bc0a7b18b7ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1736.056779] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:72:65:85', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '191a5351-07d5-4138-b855-206f48fc4375', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '94db8ca3-7767-44f8-8cb6-bc0a7b18b7ec', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1736.064905] env[62510]: DEBUG oslo.service.loopingcall [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1736.065174] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1736.065411] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c425b80f-682d-4889-a3d0-e202cd860803 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.093612] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1736.093612] env[62510]: value = "task-1769050" [ 1736.093612] env[62510]: _type = "Task" [ 1736.093612] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.103263] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769050, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.185769] env[62510]: DEBUG nova.compute.manager [req-de386aec-aad6-4d10-a8d2-c3a124f01be6 req-b46f8af0-7536-4e73-8354-a68e59858ef5 service nova] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Received event network-changed-94db8ca3-7767-44f8-8cb6-bc0a7b18b7ec {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1736.186123] env[62510]: DEBUG nova.compute.manager [req-de386aec-aad6-4d10-a8d2-c3a124f01be6 req-b46f8af0-7536-4e73-8354-a68e59858ef5 service nova] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Refreshing instance network info cache due to event network-changed-94db8ca3-7767-44f8-8cb6-bc0a7b18b7ec. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1736.186484] env[62510]: DEBUG oslo_concurrency.lockutils [req-de386aec-aad6-4d10-a8d2-c3a124f01be6 req-b46f8af0-7536-4e73-8354-a68e59858ef5 service nova] Acquiring lock "refresh_cache-774ea198-c933-449a-8380-2e4cc9327389" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1736.186717] env[62510]: DEBUG oslo_concurrency.lockutils [req-de386aec-aad6-4d10-a8d2-c3a124f01be6 req-b46f8af0-7536-4e73-8354-a68e59858ef5 service nova] Acquired lock "refresh_cache-774ea198-c933-449a-8380-2e4cc9327389" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1736.186895] env[62510]: DEBUG nova.network.neutron [req-de386aec-aad6-4d10-a8d2-c3a124f01be6 req-b46f8af0-7536-4e73-8354-a68e59858ef5 service nova] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Refreshing network info cache for port 94db8ca3-7767-44f8-8cb6-bc0a7b18b7ec {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1736.359717] env[62510]: DEBUG oslo_vmware.api [None req-8dfdc59a-ca0c-4ba3-a885-d108ba58bab2 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769049, 'name': RemoveSnapshot_Task, 'duration_secs': 1.064235} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.360056] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8dfdc59a-ca0c-4ba3-a885-d108ba58bab2 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Deleted Snapshot of the VM instance {{(pid=62510) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1736.363441] env[62510]: DEBUG nova.scheduler.client.report [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1736.443733] env[62510]: DEBUG nova.compute.manager [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1736.452820] env[62510]: DEBUG nova.compute.manager [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Stashing vm_state: active {{(pid=62510) _prep_resize /opt/stack/nova/nova/compute/manager.py:5998}} [ 1736.489351] env[62510]: DEBUG nova.virt.hardware [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='463b776d7ab2475fdd328eda53f72abb',container_format='bare',created_at=2024-12-11T19:39:47Z,direct_url=,disk_format='vmdk',id=f6223ac6-801a-4cf1-b252-449e54e92fe4,min_disk=1,min_ram=0,name='tempest-test-snap-262590727',owner='d98518565b744451ba90ba301267213f',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2024-12-11T19:40:02Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1736.490027] env[62510]: DEBUG nova.virt.hardware [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1736.490027] env[62510]: DEBUG nova.virt.hardware [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Image limits 0:0:0 {{(pid=62510) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1736.490027] env[62510]: DEBUG nova.virt.hardware [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1736.490201] env[62510]: DEBUG nova.virt.hardware [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1736.490283] env[62510]: DEBUG nova.virt.hardware [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1736.490417] env[62510]: DEBUG nova.virt.hardware [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1736.490811] env[62510]: DEBUG nova.virt.hardware [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1736.490811] env[62510]: DEBUG nova.virt.hardware [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1736.490916] env[62510]: DEBUG nova.virt.hardware [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1736.491103] env[62510]: DEBUG nova.virt.hardware [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1736.492015] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-304686aa-8c8e-412c-8664-035e4d8249ce {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.502641] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd467079-03ff-45a4-b7c6-36d6c1d20a4a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.603545] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769050, 'name': CreateVM_Task, 'duration_secs': 0.402189} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.603729] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1736.604507] env[62510]: DEBUG oslo_concurrency.lockutils [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1736.604943] env[62510]: DEBUG oslo_concurrency.lockutils [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1736.605340] env[62510]: DEBUG oslo_concurrency.lockutils [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1736.605659] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8179e331-24f1-4d45-979e-d8de6cff8121 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.610743] env[62510]: DEBUG oslo_vmware.api [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1736.610743] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5248c590-e198-d250-764d-0fd126ae84e4" [ 1736.610743] env[62510]: _type = "Task" [ 1736.610743] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.621368] env[62510]: DEBUG oslo_vmware.api [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5248c590-e198-d250-764d-0fd126ae84e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.802598] env[62510]: DEBUG nova.network.neutron [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Successfully updated port: ff1d0698-2d5b-4d43-b576-87f41d637e5a {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1736.869632] env[62510]: WARNING nova.compute.manager [None req-8dfdc59a-ca0c-4ba3-a885-d108ba58bab2 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Image not found during snapshot: nova.exception.ImageNotFound: Image 36fa0254-cd09-473e-ae37-fbce932e21dd could not be found. 
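Note on the task messages above: the repeated "Task: {'id': task-…, 'name': …_Task} progress is N% … completed successfully" lines come from oslo.vmware polling a vCenter task until it reaches a terminal state (wait_for_task / _poll_task in api.py per the call sites in the messages). The following is a minimal stand-alone sketch of that poll-until-done pattern only; FakeTask and its poll() method are invented for demonstration and do not represent oslo_vmware.api.VMwareAPISession or the real vSphere SOAP calls.

import time


class FakeTask:
    """Hypothetical stand-in for a vCenter task handle, for demonstration only."""

    def __init__(self, name, steps=3):
        self.name = name
        self._steps = steps
        self._progress = 0

    def poll(self):
        # Each poll advances progress; a real task would be queried through the
        # vSphere PropertyCollector rather than computed locally.
        self._progress = min(100, self._progress + 100 // self._steps)
        state = "success" if self._progress >= 100 else "running"
        return {"name": self.name, "progress": self._progress, "state": state}


def wait_for_task(task, poll_interval=0.5):
    # Poll until the task reports success, printing progress the way the log
    # above reports it for RemoveSnapshot_Task / CreateVM_Task.
    while True:
        info = task.poll()
        print(f"Task {info['name']} progress is {info['progress']}%")
        if info["state"] == "success":
            return info
        time.sleep(poll_interval)


if __name__ == "__main__":
    wait_for_task(FakeTask("RemoveSnapshot_Task"))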
[ 1736.871640] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.461s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1736.872128] env[62510]: DEBUG nova.compute.manager [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1736.875072] env[62510]: DEBUG oslo_concurrency.lockutils [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.894s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1736.877211] env[62510]: INFO nova.compute.claims [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1736.977369] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1737.014616] env[62510]: DEBUG nova.network.neutron [req-de386aec-aad6-4d10-a8d2-c3a124f01be6 req-b46f8af0-7536-4e73-8354-a68e59858ef5 service nova] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Updated VIF entry in instance network info cache for port 94db8ca3-7767-44f8-8cb6-bc0a7b18b7ec. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1737.014859] env[62510]: DEBUG nova.network.neutron [req-de386aec-aad6-4d10-a8d2-c3a124f01be6 req-b46f8af0-7536-4e73-8354-a68e59858ef5 service nova] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Updating instance_info_cache with network_info: [{"id": "94db8ca3-7767-44f8-8cb6-bc0a7b18b7ec", "address": "fa:16:3e:72:65:85", "network": {"id": "de9186ec-ac4f-4ac0-8499-037f92e28197", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-164983974-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f878b652f01c48139bfc6996e5e32f5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "191a5351-07d5-4138-b855-206f48fc4375", "external-id": "nsx-vlan-transportzone-939", "segmentation_id": 939, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94db8ca3-77", "ovs_interfaceid": "94db8ca3-7767-44f8-8cb6-bc0a7b18b7ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1737.125145] env[62510]: DEBUG oslo_vmware.api [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5248c590-e198-d250-764d-0fd126ae84e4, 'name': SearchDatastore_Task, 'duration_secs': 0.04072} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.125469] env[62510]: DEBUG oslo_concurrency.lockutils [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1737.125711] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1737.125981] env[62510]: DEBUG oslo_concurrency.lockutils [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1737.126112] env[62510]: DEBUG oslo_concurrency.lockutils [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1737.126292] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1737.126624] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-96979c77-a266-4fac-9fb7-11a9584127ff {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.139833] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1737.140043] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1737.140788] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-33796f93-4324-467f-986d-4ab15c8e4837 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.148426] env[62510]: DEBUG oslo_vmware.api [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1737.148426] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52082905-dd80-d4e9-71a5-599f7c7659f7" [ 1737.148426] env[62510]: _type = "Task" [ 1737.148426] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.156898] env[62510]: DEBUG oslo_vmware.api [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52082905-dd80-d4e9-71a5-599f7c7659f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.305670] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquiring lock "refresh_cache-144052ab-e3e7-401f-9edb-d8088780e468" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1737.306072] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquired lock "refresh_cache-144052ab-e3e7-401f-9edb-d8088780e468" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1737.306283] env[62510]: DEBUG nova.network.neutron [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1737.392127] env[62510]: DEBUG nova.compute.utils [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1737.394121] env[62510]: DEBUG nova.compute.manager [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1737.394308] env[62510]: DEBUG nova.network.neutron [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1737.448051] env[62510]: DEBUG nova.policy [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6decc076b3da4d1b86c6aa73f1cf2674', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '86abf24d608d4c438161dc0b8335dea1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1737.518121] env[62510]: DEBUG oslo_concurrency.lockutils [req-de386aec-aad6-4d10-a8d2-c3a124f01be6 req-b46f8af0-7536-4e73-8354-a68e59858ef5 service nova] Releasing lock "refresh_cache-774ea198-c933-449a-8380-2e4cc9327389" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1737.660100] env[62510]: DEBUG oslo_vmware.api [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52082905-dd80-d4e9-71a5-599f7c7659f7, 'name': SearchDatastore_Task, 'duration_secs': 0.043416} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.661085] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fde63d1a-0b33-43ef-ae35-224ea3e90396 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.673626] env[62510]: DEBUG oslo_vmware.api [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1737.673626] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]520b9751-e42d-8c5c-ee02-062d991df4b0" [ 1737.673626] env[62510]: _type = "Task" [ 1737.673626] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.686440] env[62510]: DEBUG oslo_vmware.api [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]520b9751-e42d-8c5c-ee02-062d991df4b0, 'name': SearchDatastore_Task, 'duration_secs': 0.015812} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.686739] env[62510]: DEBUG oslo_concurrency.lockutils [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1737.687010] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 774ea198-c933-449a-8380-2e4cc9327389/774ea198-c933-449a-8380-2e4cc9327389.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1737.687300] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-08dea500-6253-4bf9-847e-9185d7a32208 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.696936] env[62510]: DEBUG oslo_vmware.api [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1737.696936] env[62510]: value = "task-1769051" [ 1737.696936] env[62510]: _type = "Task" [ 1737.696936] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.708942] env[62510]: DEBUG oslo_vmware.api [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769051, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.882339] env[62510]: DEBUG nova.network.neutron [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1737.897443] env[62510]: DEBUG nova.compute.manager [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Start building block device mappings for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1737.958455] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9d31f393-7e58-49a5-a0ce-485bd4160059 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Acquiring lock "2dce738b-9624-4a74-8b8c-042e45b693b0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1737.958719] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9d31f393-7e58-49a5-a0ce-485bd4160059 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Lock "2dce738b-9624-4a74-8b8c-042e45b693b0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1737.958975] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9d31f393-7e58-49a5-a0ce-485bd4160059 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Acquiring lock "2dce738b-9624-4a74-8b8c-042e45b693b0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1737.961324] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9d31f393-7e58-49a5-a0ce-485bd4160059 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Lock "2dce738b-9624-4a74-8b8c-042e45b693b0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1737.961554] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9d31f393-7e58-49a5-a0ce-485bd4160059 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Lock "2dce738b-9624-4a74-8b8c-042e45b693b0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.002s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1737.964740] env[62510]: INFO nova.compute.manager [None req-9d31f393-7e58-49a5-a0ce-485bd4160059 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Terminating instance [ 1738.134048] env[62510]: DEBUG nova.network.neutron [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Updating instance_info_cache with network_info: [{"id": "ff1d0698-2d5b-4d43-b576-87f41d637e5a", "address": "fa:16:3e:49:f1:f1", "network": {"id": "925f8c0b-2409-4eca-9a68-c5b357835972", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2008838096-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d98518565b744451ba90ba301267213f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4e52d8a-b086-4333-a5a1-938680a2d2bd", "external-id": "nsx-vlan-transportzone-973", "segmentation_id": 973, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff1d0698-2d", "ovs_interfaceid": "ff1d0698-2d5b-4d43-b576-87f41d637e5a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1738.212299] env[62510]: DEBUG oslo_vmware.api [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769051, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.355039] env[62510]: DEBUG nova.network.neutron [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Successfully created port: 4568ba9b-dd3d-4796-bcfc-7bf80545a66b {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1738.412678] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab0545e9-7cf3-4b5f-b766-14e76f0b7f2c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.423153] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c5d1c8b-b414-4380-9f77-0b109e04d520 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.457763] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21656938-e962-47c3-bfdc-1b1b270c5007 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.465266] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-918ddc4f-f1c8-469d-9f3a-451eaa6dfc51 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.472048] env[62510]: DEBUG nova.compute.manager [None req-9d31f393-7e58-49a5-a0ce-485bd4160059 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1738.472322] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9d31f393-7e58-49a5-a0ce-485bd4160059 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1738.481371] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de6eb541-5d03-4f23-8770-15c9a3f3ddde {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.484623] env[62510]: DEBUG nova.compute.provider_tree [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1738.492368] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d31f393-7e58-49a5-a0ce-485bd4160059 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1738.492964] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5c36d564-5ab4-4231-b4d6-f11363117912 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.501094] env[62510]: DEBUG oslo_vmware.api [None req-9d31f393-7e58-49a5-a0ce-485bd4160059 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for the task: (returnval){ [ 1738.501094] env[62510]: value = "task-1769052" [ 1738.501094] env[62510]: _type = "Task" [ 1738.501094] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.511147] env[62510]: DEBUG oslo_vmware.api [None req-9d31f393-7e58-49a5-a0ce-485bd4160059 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769052, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.577344] env[62510]: DEBUG nova.compute.manager [req-acc0a1e1-1a99-41e3-9f02-2361f17accbf req-08aaa8d0-69ac-4119-96f5-a4d65f2dc442 service nova] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Received event network-vif-plugged-ff1d0698-2d5b-4d43-b576-87f41d637e5a {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1738.577578] env[62510]: DEBUG oslo_concurrency.lockutils [req-acc0a1e1-1a99-41e3-9f02-2361f17accbf req-08aaa8d0-69ac-4119-96f5-a4d65f2dc442 service nova] Acquiring lock "144052ab-e3e7-401f-9edb-d8088780e468-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1738.577913] env[62510]: DEBUG oslo_concurrency.lockutils [req-acc0a1e1-1a99-41e3-9f02-2361f17accbf req-08aaa8d0-69ac-4119-96f5-a4d65f2dc442 service nova] Lock "144052ab-e3e7-401f-9edb-d8088780e468-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1738.578164] env[62510]: DEBUG oslo_concurrency.lockutils [req-acc0a1e1-1a99-41e3-9f02-2361f17accbf req-08aaa8d0-69ac-4119-96f5-a4d65f2dc442 service nova] Lock "144052ab-e3e7-401f-9edb-d8088780e468-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1738.578495] env[62510]: DEBUG nova.compute.manager [req-acc0a1e1-1a99-41e3-9f02-2361f17accbf req-08aaa8d0-69ac-4119-96f5-a4d65f2dc442 service nova] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] No waiting events found dispatching network-vif-plugged-ff1d0698-2d5b-4d43-b576-87f41d637e5a {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1738.578694] env[62510]: WARNING nova.compute.manager [req-acc0a1e1-1a99-41e3-9f02-2361f17accbf req-08aaa8d0-69ac-4119-96f5-a4d65f2dc442 service nova] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Received unexpected event network-vif-plugged-ff1d0698-2d5b-4d43-b576-87f41d637e5a for instance with vm_state building and task_state spawning. [ 1738.578885] env[62510]: DEBUG nova.compute.manager [req-acc0a1e1-1a99-41e3-9f02-2361f17accbf req-08aaa8d0-69ac-4119-96f5-a4d65f2dc442 service nova] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Received event network-changed-ff1d0698-2d5b-4d43-b576-87f41d637e5a {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1738.579082] env[62510]: DEBUG nova.compute.manager [req-acc0a1e1-1a99-41e3-9f02-2361f17accbf req-08aaa8d0-69ac-4119-96f5-a4d65f2dc442 service nova] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Refreshing instance network info cache due to event network-changed-ff1d0698-2d5b-4d43-b576-87f41d637e5a. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1738.579238] env[62510]: DEBUG oslo_concurrency.lockutils [req-acc0a1e1-1a99-41e3-9f02-2361f17accbf req-08aaa8d0-69ac-4119-96f5-a4d65f2dc442 service nova] Acquiring lock "refresh_cache-144052ab-e3e7-401f-9edb-d8088780e468" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1738.637416] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Releasing lock "refresh_cache-144052ab-e3e7-401f-9edb-d8088780e468" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1738.638030] env[62510]: DEBUG nova.compute.manager [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Instance network_info: |[{"id": "ff1d0698-2d5b-4d43-b576-87f41d637e5a", "address": "fa:16:3e:49:f1:f1", "network": {"id": "925f8c0b-2409-4eca-9a68-c5b357835972", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2008838096-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d98518565b744451ba90ba301267213f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4e52d8a-b086-4333-a5a1-938680a2d2bd", "external-id": "nsx-vlan-transportzone-973", "segmentation_id": 973, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff1d0698-2d", "ovs_interfaceid": "ff1d0698-2d5b-4d43-b576-87f41d637e5a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1738.638479] env[62510]: DEBUG oslo_concurrency.lockutils [req-acc0a1e1-1a99-41e3-9f02-2361f17accbf req-08aaa8d0-69ac-4119-96f5-a4d65f2dc442 service nova] Acquired lock "refresh_cache-144052ab-e3e7-401f-9edb-d8088780e468" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1738.638853] env[62510]: DEBUG nova.network.neutron [req-acc0a1e1-1a99-41e3-9f02-2361f17accbf req-08aaa8d0-69ac-4119-96f5-a4d65f2dc442 service nova] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Refreshing network info cache for port ff1d0698-2d5b-4d43-b576-87f41d637e5a {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1738.640714] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:49:f1:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e4e52d8a-b086-4333-a5a1-938680a2d2bd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ff1d0698-2d5b-4d43-b576-87f41d637e5a', 'vif_model': 'vmxnet3'}] 
{{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1738.654286] env[62510]: DEBUG oslo.service.loopingcall [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1738.659050] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1738.659729] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c595ee04-0965-42ce-aeea-b66a92f056c0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.694696] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1738.694696] env[62510]: value = "task-1769053" [ 1738.694696] env[62510]: _type = "Task" [ 1738.694696] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.715234] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769053, 'name': CreateVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.721049] env[62510]: DEBUG oslo_vmware.api [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769051, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.602739} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.724645] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 774ea198-c933-449a-8380-2e4cc9327389/774ea198-c933-449a-8380-2e4cc9327389.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1738.725040] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1738.725838] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-75affb32-e2ad-46ff-9cb6-c904260e44ed {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.741534] env[62510]: DEBUG oslo_vmware.api [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1738.741534] env[62510]: value = "task-1769054" [ 1738.741534] env[62510]: _type = "Task" [ 1738.741534] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.756032] env[62510]: DEBUG oslo_vmware.api [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769054, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.912913] env[62510]: DEBUG nova.compute.manager [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1738.988445] env[62510]: DEBUG nova.scheduler.client.report [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1739.009688] env[62510]: DEBUG nova.virt.hardware [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1739.009688] env[62510]: DEBUG nova.virt.hardware [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1739.009966] env[62510]: DEBUG nova.virt.hardware [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1739.010081] env[62510]: DEBUG nova.virt.hardware [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 1739.010219] env[62510]: DEBUG nova.virt.hardware [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1739.010363] env[62510]: DEBUG nova.virt.hardware [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1739.010573] env[62510]: DEBUG nova.virt.hardware [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1739.010734] env[62510]: DEBUG nova.virt.hardware [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1739.010902] env[62510]: DEBUG nova.virt.hardware [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1739.011076] env[62510]: DEBUG nova.virt.hardware [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1739.011245] env[62510]: DEBUG nova.virt.hardware [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1739.012162] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f563fdf-e276-49a8-9bce-458e2c914273 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.025215] env[62510]: DEBUG oslo_vmware.api [None req-9d31f393-7e58-49a5-a0ce-485bd4160059 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769052, 'name': PowerOffVM_Task, 'duration_secs': 0.406073} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.028280] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d31f393-7e58-49a5-a0ce-485bd4160059 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1739.028482] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9d31f393-7e58-49a5-a0ce-485bd4160059 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1739.028841] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-46a0bd7e-1872-4c7f-bd47-0ad867a854de {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.031922] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5ba9288-a23d-40b7-a29c-f63fff1ff598 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.037506] env[62510]: DEBUG nova.network.neutron [req-acc0a1e1-1a99-41e3-9f02-2361f17accbf req-08aaa8d0-69ac-4119-96f5-a4d65f2dc442 service nova] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Updated VIF entry in instance network info cache for port ff1d0698-2d5b-4d43-b576-87f41d637e5a. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1739.037928] env[62510]: DEBUG nova.network.neutron [req-acc0a1e1-1a99-41e3-9f02-2361f17accbf req-08aaa8d0-69ac-4119-96f5-a4d65f2dc442 service nova] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Updating instance_info_cache with network_info: [{"id": "ff1d0698-2d5b-4d43-b576-87f41d637e5a", "address": "fa:16:3e:49:f1:f1", "network": {"id": "925f8c0b-2409-4eca-9a68-c5b357835972", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2008838096-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d98518565b744451ba90ba301267213f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4e52d8a-b086-4333-a5a1-938680a2d2bd", "external-id": "nsx-vlan-transportzone-973", "segmentation_id": 973, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff1d0698-2d", "ovs_interfaceid": "ff1d0698-2d5b-4d43-b576-87f41d637e5a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1739.201666] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9d31f393-7e58-49a5-a0ce-485bd4160059 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 
2dce738b-9624-4a74-8b8c-042e45b693b0] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1739.201888] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9d31f393-7e58-49a5-a0ce-485bd4160059 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1739.202110] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d31f393-7e58-49a5-a0ce-485bd4160059 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Deleting the datastore file [datastore1] 2dce738b-9624-4a74-8b8c-042e45b693b0 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1739.202854] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5ced9af3-cf03-4ba1-8d43-b7dc47025a41 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.208212] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769053, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.212851] env[62510]: DEBUG oslo_vmware.api [None req-9d31f393-7e58-49a5-a0ce-485bd4160059 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for the task: (returnval){ [ 1739.212851] env[62510]: value = "task-1769056" [ 1739.212851] env[62510]: _type = "Task" [ 1739.212851] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.220678] env[62510]: DEBUG oslo_vmware.api [None req-9d31f393-7e58-49a5-a0ce-485bd4160059 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769056, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.250967] env[62510]: DEBUG oslo_vmware.api [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769054, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.192228} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.251246] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1739.252229] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9bba284-b371-4a3b-9373-27e262d3a90d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.274678] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] 774ea198-c933-449a-8380-2e4cc9327389/774ea198-c933-449a-8380-2e4cc9327389.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1739.274924] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aab3a2a6-7c9c-4f5d-9cd5-68efe92ef177 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.296274] env[62510]: DEBUG oslo_vmware.api [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1739.296274] env[62510]: value = "task-1769057" [ 1739.296274] env[62510]: _type = "Task" [ 1739.296274] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.305098] env[62510]: DEBUG oslo_vmware.api [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769057, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.495438] env[62510]: DEBUG oslo_concurrency.lockutils [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.620s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1739.496193] env[62510]: DEBUG nova.compute.manager [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1739.499628] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d2640428-ead9-4962-8344-9522d1848254 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.178s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1739.499911] env[62510]: DEBUG nova.objects.instance [None req-d2640428-ead9-4962-8344-9522d1848254 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Lazy-loading 'resources' on Instance uuid 12768001-6ed0-47be-8f20-c59ee82b842a {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1739.549709] env[62510]: DEBUG oslo_concurrency.lockutils [req-acc0a1e1-1a99-41e3-9f02-2361f17accbf req-08aaa8d0-69ac-4119-96f5-a4d65f2dc442 service nova] Releasing lock "refresh_cache-144052ab-e3e7-401f-9edb-d8088780e468" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1739.708904] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769053, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.722739] env[62510]: DEBUG oslo_vmware.api [None req-9d31f393-7e58-49a5-a0ce-485bd4160059 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769056, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.468913} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.723036] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d31f393-7e58-49a5-a0ce-485bd4160059 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1739.723235] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9d31f393-7e58-49a5-a0ce-485bd4160059 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1739.723410] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9d31f393-7e58-49a5-a0ce-485bd4160059 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1739.723583] env[62510]: INFO nova.compute.manager [None req-9d31f393-7e58-49a5-a0ce-485bd4160059 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Took 1.25 seconds to destroy the instance on the hypervisor. 
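The vCenter task records in the entries above follow a fixed shape: a "Waiting for the task" block, zero or more "progress is N%" polls from oslo_vmware.api._poll_task, and a closing record that carries 'duration_secs' once the task completes (PowerOffVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, DeleteDatastoreFile_Task, and so on). As a rough illustration of how that timing data can be pulled out of a log like this one, here is a small standalone Python sketch; it is not part of Nova or oslo.vmware, and the regex and helper name are assumptions tied to the exact formatting shown in this section.

import re

# Matches the task-completion records emitted by oslo_vmware.api above, e.g.
#   Task: {'id': task-1769056, 'name': DeleteDatastoreFile_Task,
#   'duration_secs': 0.468913} completed successfully.
TASK_DONE = re.compile(
    r"Task: \{'id': (?P<id>[^,]+), 'name': (?P<name>\w+),"
    r" 'duration_secs': (?P<secs>[\d.]+)\} completed successfully"
)

def task_durations(log_lines):
    """Yield (task_id, task_name, duration_secs) for each completed task."""
    for line in log_lines:
        match = TASK_DONE.search(line)
        if match:
            yield match.group("id"), match.group("name"), float(match.group("secs"))

if __name__ == "__main__":
    # Sample taken verbatim (message portion only) from the log section above.
    sample = ("DEBUG oslo_vmware.api [-] Task: {'id': task-1769053, "
              "'name': CreateVM_Task, 'duration_secs': 1.270955} "
              "completed successfully.")
    for task_id, name, secs in task_durations([sample]):
        print(f"{task_id}: {name} took {secs:.3f}s")

Run over this section, a parser like this reports, for example, that CreateVM_Task (task-1769053) completed in about 1.27 seconds and DeleteDatastoreFile_Task (task-1769056) in about 0.47 seconds, which lines up with the "Took 1.25 seconds to destroy the instance on the hypervisor" summary logged just above.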
[ 1739.723828] env[62510]: DEBUG oslo.service.loopingcall [None req-9d31f393-7e58-49a5-a0ce-485bd4160059 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1739.724037] env[62510]: DEBUG nova.compute.manager [-] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1739.724141] env[62510]: DEBUG nova.network.neutron [-] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1739.807214] env[62510]: DEBUG oslo_vmware.api [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769057, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.002876] env[62510]: DEBUG nova.compute.utils [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1740.008088] env[62510]: DEBUG nova.compute.manager [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1740.008088] env[62510]: DEBUG nova.network.neutron [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1740.144382] env[62510]: DEBUG nova.policy [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '97a7f1ca55d549a3985e95b6bbc665f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '94a46473611d4b22be7c66c909d1b348', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1740.209935] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769053, 'name': CreateVM_Task, 'duration_secs': 1.270955} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.219024] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1740.219024] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f6223ac6-801a-4cf1-b252-449e54e92fe4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1740.219024] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f6223ac6-801a-4cf1-b252-449e54e92fe4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1740.219024] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/f6223ac6-801a-4cf1-b252-449e54e92fe4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1740.219024] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-826d1756-184a-4ecf-8106-9fe5f09f42c9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.224975] env[62510]: DEBUG oslo_vmware.api [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1740.224975] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5212201f-e79d-a779-ae83-1a272fbaf1a3" [ 1740.224975] env[62510]: _type = "Task" [ 1740.224975] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.238232] env[62510]: DEBUG oslo_vmware.api [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5212201f-e79d-a779-ae83-1a272fbaf1a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.310124] env[62510]: DEBUG oslo_vmware.api [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769057, 'name': ReconfigVM_Task, 'duration_secs': 0.955015} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.310597] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Reconfigured VM instance instance-0000004c to attach disk [datastore1] 774ea198-c933-449a-8380-2e4cc9327389/774ea198-c933-449a-8380-2e4cc9327389.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1740.311418] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0198ad29-7992-44c9-a9bd-cbf91606509c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.329363] env[62510]: DEBUG oslo_vmware.api [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1740.329363] env[62510]: value = "task-1769058" [ 1740.329363] env[62510]: _type = "Task" [ 1740.329363] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.340706] env[62510]: DEBUG nova.compute.manager [req-f7a2fc8f-1ebf-4334-ac42-b918e722f983 req-44273254-a4d8-4190-bf66-d8468be474d3 service nova] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Received event network-vif-plugged-4568ba9b-dd3d-4796-bcfc-7bf80545a66b {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1740.341230] env[62510]: DEBUG oslo_concurrency.lockutils [req-f7a2fc8f-1ebf-4334-ac42-b918e722f983 req-44273254-a4d8-4190-bf66-d8468be474d3 service nova] Acquiring lock "bc474f8b-dd3b-4d7a-a8e0-fea5570b3091-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1740.342065] env[62510]: DEBUG oslo_concurrency.lockutils [req-f7a2fc8f-1ebf-4334-ac42-b918e722f983 req-44273254-a4d8-4190-bf66-d8468be474d3 service nova] Lock "bc474f8b-dd3b-4d7a-a8e0-fea5570b3091-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1740.347223] env[62510]: DEBUG oslo_concurrency.lockutils [req-f7a2fc8f-1ebf-4334-ac42-b918e722f983 req-44273254-a4d8-4190-bf66-d8468be474d3 service nova] Lock "bc474f8b-dd3b-4d7a-a8e0-fea5570b3091-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1740.347223] env[62510]: DEBUG nova.compute.manager [req-f7a2fc8f-1ebf-4334-ac42-b918e722f983 req-44273254-a4d8-4190-bf66-d8468be474d3 service nova] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] No waiting events found dispatching network-vif-plugged-4568ba9b-dd3d-4796-bcfc-7bf80545a66b {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1740.347223] env[62510]: WARNING nova.compute.manager [req-f7a2fc8f-1ebf-4334-ac42-b918e722f983 req-44273254-a4d8-4190-bf66-d8468be474d3 service nova] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Received unexpected event 
network-vif-plugged-4568ba9b-dd3d-4796-bcfc-7bf80545a66b for instance with vm_state building and task_state spawning. [ 1740.352269] env[62510]: DEBUG oslo_vmware.api [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769058, 'name': Rename_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.427156] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea618dd2-3c2f-4df3-a5bd-c0d05c0b675b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.435432] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c8ceaf8-0368-4baa-bf46-d96cf8c1db18 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.476026] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ab7f51-c303-4a6d-8f95-0b5e9580eeca {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.483501] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-709c31e6-fb89-4d24-871d-919ed0714e10 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.501666] env[62510]: DEBUG nova.compute.provider_tree [None req-d2640428-ead9-4962-8344-9522d1848254 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1740.509816] env[62510]: DEBUG nova.compute.manager [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Start building block device mappings for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1740.617685] env[62510]: DEBUG nova.network.neutron [-] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1740.622860] env[62510]: DEBUG nova.network.neutron [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Successfully updated port: 4568ba9b-dd3d-4796-bcfc-7bf80545a66b {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1740.736475] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f6223ac6-801a-4cf1-b252-449e54e92fe4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1740.736754] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Processing image f6223ac6-801a-4cf1-b252-449e54e92fe4 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1740.737009] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/f6223ac6-801a-4cf1-b252-449e54e92fe4/f6223ac6-801a-4cf1-b252-449e54e92fe4.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1740.737170] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquired lock "[datastore1] devstack-image-cache_base/f6223ac6-801a-4cf1-b252-449e54e92fe4/f6223ac6-801a-4cf1-b252-449e54e92fe4.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1740.737354] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1740.737614] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-840cc720-d0cd-4b35-83de-72dbf9e36986 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.748402] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1740.748531] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1740.749318] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc5228da-491e-4396-b78e-9fd30b3512af {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.755077] env[62510]: DEBUG oslo_vmware.api [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1740.755077] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52368022-81c2-810a-1f6f-4a0b29b1e8d8" [ 1740.755077] env[62510]: _type = "Task" [ 1740.755077] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.766766] env[62510]: DEBUG oslo_vmware.api [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52368022-81c2-810a-1f6f-4a0b29b1e8d8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.782801] env[62510]: DEBUG nova.network.neutron [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Successfully created port: 764fd77c-e3bd-42f6-b51b-0a6c9e718b34 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1740.836928] env[62510]: DEBUG oslo_vmware.api [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769058, 'name': Rename_Task, 'duration_secs': 0.154882} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.837254] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1740.837556] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8f06f0af-7ad3-438f-9515-98c7fb78dde0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.845211] env[62510]: DEBUG oslo_vmware.api [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1740.845211] env[62510]: value = "task-1769059" [ 1740.845211] env[62510]: _type = "Task" [ 1740.845211] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.854721] env[62510]: DEBUG oslo_vmware.api [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769059, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.857179] env[62510]: DEBUG nova.compute.manager [req-40a61fe7-ad30-4534-96ba-7cfb3a81d2ee req-5331303e-aef3-4730-827d-d360ee3d7172 service nova] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Received event network-vif-deleted-ac9385ca-72de-486a-9901-44c6387f7c86 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1741.005520] env[62510]: DEBUG nova.scheduler.client.report [None req-d2640428-ead9-4962-8344-9522d1848254 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1741.120947] env[62510]: INFO nova.compute.manager [-] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Took 1.40 seconds to deallocate network for instance. [ 1741.124773] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "refresh_cache-bc474f8b-dd3b-4d7a-a8e0-fea5570b3091" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1741.124901] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquired lock "refresh_cache-bc474f8b-dd3b-4d7a-a8e0-fea5570b3091" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1741.125070] env[62510]: DEBUG nova.network.neutron [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1741.266349] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Preparing fetch location {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1741.266881] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Fetch image to [datastore1] OSTACK_IMG_a1107bd8-baab-4030-9530-8f69edd90886/OSTACK_IMG_a1107bd8-baab-4030-9530-8f69edd90886.vmdk {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1741.266952] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] 
Downloading stream optimized image f6223ac6-801a-4cf1-b252-449e54e92fe4 to [datastore1] OSTACK_IMG_a1107bd8-baab-4030-9530-8f69edd90886/OSTACK_IMG_a1107bd8-baab-4030-9530-8f69edd90886.vmdk on the data store datastore1 as vApp {{(pid=62510) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1741.267145] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Downloading image file data f6223ac6-801a-4cf1-b252-449e54e92fe4 to the ESX as VM named 'OSTACK_IMG_a1107bd8-baab-4030-9530-8f69edd90886' {{(pid=62510) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1741.356032] env[62510]: DEBUG oslo_vmware.api [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769059, 'name': PowerOnVM_Task, 'duration_secs': 0.49041} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1741.358081] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1741.358320] env[62510]: INFO nova.compute.manager [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Took 7.41 seconds to spawn the instance on the hypervisor. [ 1741.358505] env[62510]: DEBUG nova.compute.manager [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1741.360446] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf87c154-bdb5-41c4-aaeb-6dbfaf746625 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.381215] env[62510]: DEBUG oslo_vmware.rw_handles [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1741.381215] env[62510]: value = "resgroup-9" [ 1741.381215] env[62510]: _type = "ResourcePool" [ 1741.381215] env[62510]: }. 
{{(pid=62510) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1741.382113] env[62510]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-327de0f3-e42c-4cb9-b403-30f27befdf6c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.403784] env[62510]: DEBUG oslo_vmware.rw_handles [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lease: (returnval){ [ 1741.403784] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]522c0a57-9cda-28ab-0042-8247fec9dfc0" [ 1741.403784] env[62510]: _type = "HttpNfcLease" [ 1741.403784] env[62510]: } obtained for vApp import into resource pool (val){ [ 1741.403784] env[62510]: value = "resgroup-9" [ 1741.403784] env[62510]: _type = "ResourcePool" [ 1741.403784] env[62510]: }. {{(pid=62510) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1741.404117] env[62510]: DEBUG oslo_vmware.api [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the lease: (returnval){ [ 1741.404117] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]522c0a57-9cda-28ab-0042-8247fec9dfc0" [ 1741.404117] env[62510]: _type = "HttpNfcLease" [ 1741.404117] env[62510]: } to be ready. {{(pid=62510) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1741.413136] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1741.413136] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]522c0a57-9cda-28ab-0042-8247fec9dfc0" [ 1741.413136] env[62510]: _type = "HttpNfcLease" [ 1741.413136] env[62510]: } is initializing. {{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1741.510454] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d2640428-ead9-4962-8344-9522d1848254 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.011s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1741.512790] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7cb054d8-944c-43c2-9cac-cc1621370fc5 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.119s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1741.513033] env[62510]: DEBUG nova.objects.instance [None req-7cb054d8-944c-43c2-9cac-cc1621370fc5 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Lazy-loading 'resources' on Instance uuid e3850272-9dae-4164-8f0e-f5513af23f49 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1741.516856] env[62510]: DEBUG nova.compute.manager [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1741.543628] env[62510]: DEBUG nova.virt.hardware [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1741.543887] env[62510]: DEBUG nova.virt.hardware [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1741.544053] env[62510]: DEBUG nova.virt.hardware [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1741.544253] env[62510]: DEBUG nova.virt.hardware [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1741.544402] env[62510]: DEBUG nova.virt.hardware [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1741.544619] env[62510]: DEBUG nova.virt.hardware [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1741.544808] env[62510]: DEBUG nova.virt.hardware [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1741.546967] env[62510]: DEBUG nova.virt.hardware [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1741.546967] env[62510]: DEBUG nova.virt.hardware [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 
tempest-ServersTestJSON-938961669-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1741.546967] env[62510]: DEBUG nova.virt.hardware [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1741.546967] env[62510]: DEBUG nova.virt.hardware [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1741.546967] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d151ca8-daee-42c0-ab4f-73fbbb980fea {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.555472] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17435206-1da2-4c96-bed7-c1ec3e2f6ea6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.579553] env[62510]: INFO nova.scheduler.client.report [None req-d2640428-ead9-4962-8344-9522d1848254 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Deleted allocations for instance 12768001-6ed0-47be-8f20-c59ee82b842a [ 1741.627649] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9d31f393-7e58-49a5-a0ce-485bd4160059 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1741.684216] env[62510]: DEBUG nova.network.neutron [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Instance cache missing network info. 
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1741.755820] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Acquiring lock "568ce58c-9ce5-4b40-988f-f31d8e0c376d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1741.756088] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Lock "568ce58c-9ce5-4b40-988f-f31d8e0c376d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1741.756297] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Acquiring lock "568ce58c-9ce5-4b40-988f-f31d8e0c376d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1741.756589] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Lock "568ce58c-9ce5-4b40-988f-f31d8e0c376d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1741.756716] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Lock "568ce58c-9ce5-4b40-988f-f31d8e0c376d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1741.758882] env[62510]: INFO nova.compute.manager [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Terminating instance [ 1741.882377] env[62510]: INFO nova.compute.manager [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Took 27.99 seconds to build instance. [ 1741.916555] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1741.916555] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]522c0a57-9cda-28ab-0042-8247fec9dfc0" [ 1741.916555] env[62510]: _type = "HttpNfcLease" [ 1741.916555] env[62510]: } is initializing. 
{{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1741.942935] env[62510]: DEBUG nova.network.neutron [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Updating instance_info_cache with network_info: [{"id": "4568ba9b-dd3d-4796-bcfc-7bf80545a66b", "address": "fa:16:3e:13:17:23", "network": {"id": "9b209a99-520e-436f-be97-fe37ae505518", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1482163995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86abf24d608d4c438161dc0b8335dea1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4568ba9b-dd", "ovs_interfaceid": "4568ba9b-dd3d-4796-bcfc-7bf80545a66b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1742.090276] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d2640428-ead9-4962-8344-9522d1848254 tempest-MigrationsAdminTest-857206338 tempest-MigrationsAdminTest-857206338-project-member] Lock "12768001-6ed0-47be-8f20-c59ee82b842a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.014s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1742.263436] env[62510]: DEBUG nova.compute.manager [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1742.263436] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1742.263693] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-674adbb9-b964-41af-b9c3-6097f335bcc6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.272398] env[62510]: DEBUG oslo_vmware.api [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Waiting for the task: (returnval){ [ 1742.272398] env[62510]: value = "task-1769061" [ 1742.272398] env[62510]: _type = "Task" [ 1742.272398] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1742.281698] env[62510]: DEBUG oslo_vmware.api [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Task: {'id': task-1769061, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.356424] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-395a82a1-f339-4dbe-a0b6-d138f164432b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.370785] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a60530bb-00c8-4138-b545-379940eb390c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.379027] env[62510]: DEBUG nova.compute.manager [req-06d1848f-26a6-4b82-a30c-e5967fd8e869 req-1f1c73f2-8199-4209-8e6a-7530af8e9df5 service nova] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Received event network-changed-4568ba9b-dd3d-4796-bcfc-7bf80545a66b {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1742.379357] env[62510]: DEBUG nova.compute.manager [req-06d1848f-26a6-4b82-a30c-e5967fd8e869 req-1f1c73f2-8199-4209-8e6a-7530af8e9df5 service nova] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Refreshing instance network info cache due to event network-changed-4568ba9b-dd3d-4796-bcfc-7bf80545a66b. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1742.379609] env[62510]: DEBUG oslo_concurrency.lockutils [req-06d1848f-26a6-4b82-a30c-e5967fd8e869 req-1f1c73f2-8199-4209-8e6a-7530af8e9df5 service nova] Acquiring lock "refresh_cache-bc474f8b-dd3b-4d7a-a8e0-fea5570b3091" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1742.385855] env[62510]: DEBUG oslo_concurrency.lockutils [None req-731a093b-26f1-4e58-b413-ebbe4a23438d tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "774ea198-c933-449a-8380-2e4cc9327389" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.501s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1742.420527] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6f7f7ed-409c-4498-bad3-aa8a68f36a57 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.431764] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec3082b7-c39e-4e1d-b4cb-6d869ddaacdf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.435160] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1742.435160] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]522c0a57-9cda-28ab-0042-8247fec9dfc0" [ 1742.435160] env[62510]: _type = "HttpNfcLease" [ 1742.435160] env[62510]: } is ready. 
{{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1742.435516] env[62510]: DEBUG oslo_vmware.rw_handles [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1742.435516] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]522c0a57-9cda-28ab-0042-8247fec9dfc0" [ 1742.435516] env[62510]: _type = "HttpNfcLease" [ 1742.435516] env[62510]: }. {{(pid=62510) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1742.440022] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1598174-0995-4640-af2e-3573eeb4451c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.450660] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Releasing lock "refresh_cache-bc474f8b-dd3b-4d7a-a8e0-fea5570b3091" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1742.451027] env[62510]: DEBUG nova.compute.manager [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Instance network_info: |[{"id": "4568ba9b-dd3d-4796-bcfc-7bf80545a66b", "address": "fa:16:3e:13:17:23", "network": {"id": "9b209a99-520e-436f-be97-fe37ae505518", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1482163995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86abf24d608d4c438161dc0b8335dea1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4568ba9b-dd", "ovs_interfaceid": "4568ba9b-dd3d-4796-bcfc-7bf80545a66b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1742.451510] env[62510]: DEBUG nova.compute.provider_tree [None req-7cb054d8-944c-43c2-9cac-cc1621370fc5 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1742.453270] env[62510]: DEBUG oslo_concurrency.lockutils [req-06d1848f-26a6-4b82-a30c-e5967fd8e869 req-1f1c73f2-8199-4209-8e6a-7530af8e9df5 service nova] Acquired lock "refresh_cache-bc474f8b-dd3b-4d7a-a8e0-fea5570b3091" {{(pid=62510) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1742.453462] env[62510]: DEBUG nova.network.neutron [req-06d1848f-26a6-4b82-a30c-e5967fd8e869 req-1f1c73f2-8199-4209-8e6a-7530af8e9df5 service nova] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Refreshing network info cache for port 4568ba9b-dd3d-4796-bcfc-7bf80545a66b {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1742.461035] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:13:17:23', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9630cae2-7dd9-42b7-8b53-91ab254af243', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4568ba9b-dd3d-4796-bcfc-7bf80545a66b', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1742.463483] env[62510]: DEBUG oslo.service.loopingcall [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1742.467529] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1742.467894] env[62510]: DEBUG oslo_vmware.rw_handles [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52636861-8483-9cb6-96f1-74ee23a602a3/disk-0.vmdk from lease info. {{(pid=62510) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1742.468088] env[62510]: DEBUG oslo_vmware.rw_handles [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52636861-8483-9cb6-96f1-74ee23a602a3/disk-0.vmdk. {{(pid=62510) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1742.470280] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-464719ae-6542-4b2f-9567-ed03ae92b26f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.556043] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b2190e8e-66af-4bdc-8a5d-56ecfe63f143 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.557831] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1742.557831] env[62510]: value = "task-1769062" [ 1742.557831] env[62510]: _type = "Task" [ 1742.557831] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1742.568797] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769062, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.784699] env[62510]: DEBUG oslo_vmware.api [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Task: {'id': task-1769061, 'name': PowerOffVM_Task, 'duration_secs': 0.219273} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1742.785106] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1742.785321] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Volume detach. Driver type: vmdk {{(pid=62510) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1742.785539] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367276', 'volume_id': 'b09085d6-32c6-49d0-a8fe-b4eb76d80b62', 'name': 'volume-b09085d6-32c6-49d0-a8fe-b4eb76d80b62', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '568ce58c-9ce5-4b40-988f-f31d8e0c376d', 'attached_at': '', 'detached_at': '', 'volume_id': 'b09085d6-32c6-49d0-a8fe-b4eb76d80b62', 'serial': 'b09085d6-32c6-49d0-a8fe-b4eb76d80b62'} {{(pid=62510) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1742.786406] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0283582-c843-4bff-8d08-9eac4e97e828 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.807111] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f485860d-f85d-451c-a792-08ffcd6a4629 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.816176] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae69d3ed-ede8-45ef-acf2-349b8ae532b2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.839442] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec4da5da-b197-47cf-8234-9a7d55d892f4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.856891] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-0908cdd1-c9d3-4214-a381-feec552ed30e 
tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] The volume has not been displaced from its original location: [datastore1] volume-b09085d6-32c6-49d0-a8fe-b4eb76d80b62/volume-b09085d6-32c6-49d0-a8fe-b4eb76d80b62.vmdk. No consolidation needed. {{(pid=62510) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1742.862598] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Reconfiguring VM instance instance-0000002f to detach disk 2000 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1742.865897] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a43f3c5e-3b52-413e-b359-dc3db99c248c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.891300] env[62510]: DEBUG oslo_vmware.api [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Waiting for the task: (returnval){ [ 1742.891300] env[62510]: value = "task-1769063" [ 1742.891300] env[62510]: _type = "Task" [ 1742.891300] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1742.903989] env[62510]: DEBUG oslo_vmware.api [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Task: {'id': task-1769063, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.948792] env[62510]: DEBUG nova.network.neutron [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Successfully updated port: 764fd77c-e3bd-42f6-b51b-0a6c9e718b34 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1742.969106] env[62510]: DEBUG nova.scheduler.client.report [None req-7cb054d8-944c-43c2-9cac-cc1621370fc5 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1743.074988] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769062, 'name': CreateVM_Task, 'duration_secs': 0.508403} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1743.078056] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1743.078868] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1743.079279] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1743.079748] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1743.079921] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d81188d-7549-4a37-9236-be018852d147 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.088612] env[62510]: DEBUG oslo_vmware.api [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1743.088612] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5220b885-f61c-668d-c626-499e573fda38" [ 1743.088612] env[62510]: _type = "Task" [ 1743.088612] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1743.102629] env[62510]: DEBUG oslo_vmware.api [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5220b885-f61c-668d-c626-499e573fda38, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.374966] env[62510]: DEBUG oslo_vmware.rw_handles [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Completed reading data from the image iterator. {{(pid=62510) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1743.375312] env[62510]: DEBUG oslo_vmware.rw_handles [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52636861-8483-9cb6-96f1-74ee23a602a3/disk-0.vmdk. 
{{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1743.376165] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e2c4793-ac39-4c55-a492-e23ecfdd5ef3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.383419] env[62510]: DEBUG oslo_vmware.rw_handles [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52636861-8483-9cb6-96f1-74ee23a602a3/disk-0.vmdk is in state: ready. {{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1743.383655] env[62510]: DEBUG oslo_vmware.rw_handles [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52636861-8483-9cb6-96f1-74ee23a602a3/disk-0.vmdk. {{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1743.383923] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-c2dda779-d044-4669-bc4e-19fb4c793823 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.401783] env[62510]: DEBUG oslo_vmware.api [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Task: {'id': task-1769063, 'name': ReconfigVM_Task, 'duration_secs': 0.239494} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1743.404175] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Reconfigured VM instance instance-0000002f to detach disk 2000 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1743.409032] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5267700b-acf3-45bb-8a0d-75e96bf71a93 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.425572] env[62510]: DEBUG oslo_vmware.api [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Waiting for the task: (returnval){ [ 1743.425572] env[62510]: value = "task-1769064" [ 1743.425572] env[62510]: _type = "Task" [ 1743.425572] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1743.434417] env[62510]: DEBUG oslo_vmware.api [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Task: {'id': task-1769064, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.452190] env[62510]: DEBUG oslo_concurrency.lockutils [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "refresh_cache-8b079310-084b-4ba0-8a82-57d64f421c11" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1743.452382] env[62510]: DEBUG oslo_concurrency.lockutils [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquired lock "refresh_cache-8b079310-084b-4ba0-8a82-57d64f421c11" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1743.452503] env[62510]: DEBUG nova.network.neutron [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1743.477289] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7cb054d8-944c-43c2-9cac-cc1621370fc5 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.964s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1743.479742] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.662s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1743.481368] env[62510]: INFO nova.compute.claims [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1743.524469] env[62510]: DEBUG nova.network.neutron [req-06d1848f-26a6-4b82-a30c-e5967fd8e869 req-1f1c73f2-8199-4209-8e6a-7530af8e9df5 service nova] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Updated VIF entry in instance network info cache for port 4568ba9b-dd3d-4796-bcfc-7bf80545a66b. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1743.525031] env[62510]: DEBUG nova.network.neutron [req-06d1848f-26a6-4b82-a30c-e5967fd8e869 req-1f1c73f2-8199-4209-8e6a-7530af8e9df5 service nova] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Updating instance_info_cache with network_info: [{"id": "4568ba9b-dd3d-4796-bcfc-7bf80545a66b", "address": "fa:16:3e:13:17:23", "network": {"id": "9b209a99-520e-436f-be97-fe37ae505518", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1482163995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86abf24d608d4c438161dc0b8335dea1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4568ba9b-dd", "ovs_interfaceid": "4568ba9b-dd3d-4796-bcfc-7bf80545a66b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1743.579582] env[62510]: INFO nova.scheduler.client.report [None req-7cb054d8-944c-43c2-9cac-cc1621370fc5 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Deleted allocations for instance e3850272-9dae-4164-8f0e-f5513af23f49 [ 1743.603558] env[62510]: DEBUG oslo_vmware.api [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5220b885-f61c-668d-c626-499e573fda38, 'name': SearchDatastore_Task, 'duration_secs': 0.025792} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1743.603558] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1743.603864] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1743.603900] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1743.604056] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1743.605037] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1743.605037] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b8c355e2-8d01-41df-908a-ccf76d039b9f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.616632] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1743.616962] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1743.619470] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5ad912a-d86d-45ed-820c-a594ddab52f2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.627503] env[62510]: DEBUG oslo_vmware.api [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1743.627503] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52d3684a-805e-7351-b001-48fb823f871a" [ 1743.627503] env[62510]: _type = "Task" [ 1743.627503] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1743.642157] env[62510]: DEBUG oslo_vmware.api [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52d3684a-805e-7351-b001-48fb823f871a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.790000] env[62510]: DEBUG oslo_vmware.rw_handles [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52636861-8483-9cb6-96f1-74ee23a602a3/disk-0.vmdk. {{(pid=62510) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1743.790238] env[62510]: INFO nova.virt.vmwareapi.images [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Downloaded image file data f6223ac6-801a-4cf1-b252-449e54e92fe4 [ 1743.791096] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bca544ce-b40b-4c68-9ce5-5771bffa419b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.810486] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d74d8d9d-1dce-4b97-ae7e-3db54b56f3d6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.875474] env[62510]: INFO nova.virt.vmwareapi.images [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] The imported VM was unregistered [ 1743.878285] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Caching image {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1743.878571] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Creating directory with path [datastore1] devstack-image-cache_base/f6223ac6-801a-4cf1-b252-449e54e92fe4 {{(pid=62510) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1743.878998] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4fcf018d-22c0-4fc8-a6ed-7f6243a3db3b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.891722] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Created directory with path [datastore1] devstack-image-cache_base/f6223ac6-801a-4cf1-b252-449e54e92fe4 {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1743.891949] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_a1107bd8-baab-4030-9530-8f69edd90886/OSTACK_IMG_a1107bd8-baab-4030-9530-8f69edd90886.vmdk to [datastore1] devstack-image-cache_base/f6223ac6-801a-4cf1-b252-449e54e92fe4/f6223ac6-801a-4cf1-b252-449e54e92fe4.vmdk. {{(pid=62510) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1743.893376] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-12f03e47-5450-435e-8e5e-ae54d51d4e1a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.898292] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2292b87f-4e2f-4f1a-8429-f0033bea5efb tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "774ea198-c933-449a-8380-2e4cc9327389" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1743.898545] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2292b87f-4e2f-4f1a-8429-f0033bea5efb tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "774ea198-c933-449a-8380-2e4cc9327389" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1743.904356] env[62510]: DEBUG oslo_vmware.api [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1743.904356] env[62510]: value = "task-1769066" [ 1743.904356] env[62510]: _type = "Task" [ 1743.904356] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1743.915217] env[62510]: DEBUG oslo_vmware.api [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769066, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.940914] env[62510]: DEBUG oslo_vmware.api [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Task: {'id': task-1769064, 'name': ReconfigVM_Task, 'duration_secs': 0.233856} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1743.941422] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367276', 'volume_id': 'b09085d6-32c6-49d0-a8fe-b4eb76d80b62', 'name': 'volume-b09085d6-32c6-49d0-a8fe-b4eb76d80b62', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '568ce58c-9ce5-4b40-988f-f31d8e0c376d', 'attached_at': '', 'detached_at': '', 'volume_id': 'b09085d6-32c6-49d0-a8fe-b4eb76d80b62', 'serial': 'b09085d6-32c6-49d0-a8fe-b4eb76d80b62'} {{(pid=62510) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1743.941702] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1743.942581] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb5bd4b1-6be1-4767-a080-e8af9d6772ff {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.951417] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1743.951701] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-62c192f2-2a5d-4ba5-9769-9950b45bc218 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.995991] env[62510]: DEBUG nova.network.neutron [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Instance cache missing network info. 
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1744.029130] env[62510]: DEBUG oslo_concurrency.lockutils [req-06d1848f-26a6-4b82-a30c-e5967fd8e869 req-1f1c73f2-8199-4209-8e6a-7530af8e9df5 service nova] Releasing lock "refresh_cache-bc474f8b-dd3b-4d7a-a8e0-fea5570b3091" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1744.054069] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1744.054349] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1744.054570] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Deleting the datastore file [datastore1] 568ce58c-9ce5-4b40-988f-f31d8e0c376d {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1744.054866] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-af90c6a3-bbd4-40de-92e1-2fd8b738512e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.062230] env[62510]: DEBUG oslo_vmware.api [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Waiting for the task: (returnval){ [ 1744.062230] env[62510]: value = "task-1769068" [ 1744.062230] env[62510]: _type = "Task" [ 1744.062230] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1744.073679] env[62510]: DEBUG oslo_vmware.api [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Task: {'id': task-1769068, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.089149] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7cb054d8-944c-43c2-9cac-cc1621370fc5 tempest-SecurityGroupsTestJSON-2125751649 tempest-SecurityGroupsTestJSON-2125751649-project-member] Lock "e3850272-9dae-4164-8f0e-f5513af23f49" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.045s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1744.142526] env[62510]: DEBUG oslo_vmware.api [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52d3684a-805e-7351-b001-48fb823f871a, 'name': SearchDatastore_Task, 'duration_secs': 0.015388} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1744.142526] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0aaf91bc-6b62-44a3-9d6d-37164dbe380c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.150649] env[62510]: DEBUG oslo_vmware.api [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1744.150649] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52a3012c-46a0-e2a3-2291-ac5ef81f5f37" [ 1744.150649] env[62510]: _type = "Task" [ 1744.150649] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1744.162379] env[62510]: DEBUG oslo_vmware.api [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52a3012c-46a0-e2a3-2291-ac5ef81f5f37, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.231504] env[62510]: DEBUG nova.network.neutron [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Updating instance_info_cache with network_info: [{"id": "764fd77c-e3bd-42f6-b51b-0a6c9e718b34", "address": "fa:16:3e:5d:b5:6c", "network": {"id": "22bd7136-e6e5-445f-8cd0-6cfe0341410c", "bridge": "br-int", "label": "tempest-ServersTestJSON-2034430291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "94a46473611d4b22be7c66c909d1b348", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap764fd77c-e3", "ovs_interfaceid": "764fd77c-e3bd-42f6-b51b-0a6c9e718b34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1744.402022] env[62510]: DEBUG nova.compute.utils [None req-2292b87f-4e2f-4f1a-8429-f0033bea5efb tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1744.415028] env[62510]: DEBUG oslo_vmware.api [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769066, 'name': MoveVirtualDisk_Task} progress is 21%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.568174] env[62510]: DEBUG nova.compute.manager [req-619309b0-b26e-450c-bc14-e72c27df8a66 req-56c14117-de92-40cd-bd36-693fc1eed10c service nova] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Received event network-vif-plugged-764fd77c-e3bd-42f6-b51b-0a6c9e718b34 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1744.568433] env[62510]: DEBUG oslo_concurrency.lockutils [req-619309b0-b26e-450c-bc14-e72c27df8a66 req-56c14117-de92-40cd-bd36-693fc1eed10c service nova] Acquiring lock "8b079310-084b-4ba0-8a82-57d64f421c11-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.568665] env[62510]: DEBUG oslo_concurrency.lockutils [req-619309b0-b26e-450c-bc14-e72c27df8a66 req-56c14117-de92-40cd-bd36-693fc1eed10c service nova] Lock "8b079310-084b-4ba0-8a82-57d64f421c11-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1744.568781] env[62510]: DEBUG oslo_concurrency.lockutils [req-619309b0-b26e-450c-bc14-e72c27df8a66 req-56c14117-de92-40cd-bd36-693fc1eed10c service nova] Lock "8b079310-084b-4ba0-8a82-57d64f421c11-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1744.569012] env[62510]: DEBUG nova.compute.manager [req-619309b0-b26e-450c-bc14-e72c27df8a66 req-56c14117-de92-40cd-bd36-693fc1eed10c service nova] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] No waiting events found dispatching network-vif-plugged-764fd77c-e3bd-42f6-b51b-0a6c9e718b34 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1744.569128] env[62510]: WARNING nova.compute.manager [req-619309b0-b26e-450c-bc14-e72c27df8a66 req-56c14117-de92-40cd-bd36-693fc1eed10c service nova] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Received unexpected event network-vif-plugged-764fd77c-e3bd-42f6-b51b-0a6c9e718b34 for instance with vm_state building and task_state spawning. [ 1744.569290] env[62510]: DEBUG nova.compute.manager [req-619309b0-b26e-450c-bc14-e72c27df8a66 req-56c14117-de92-40cd-bd36-693fc1eed10c service nova] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Received event network-changed-764fd77c-e3bd-42f6-b51b-0a6c9e718b34 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1744.569448] env[62510]: DEBUG nova.compute.manager [req-619309b0-b26e-450c-bc14-e72c27df8a66 req-56c14117-de92-40cd-bd36-693fc1eed10c service nova] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Refreshing instance network info cache due to event network-changed-764fd77c-e3bd-42f6-b51b-0a6c9e718b34. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1744.569613] env[62510]: DEBUG oslo_concurrency.lockutils [req-619309b0-b26e-450c-bc14-e72c27df8a66 req-56c14117-de92-40cd-bd36-693fc1eed10c service nova] Acquiring lock "refresh_cache-8b079310-084b-4ba0-8a82-57d64f421c11" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1744.576574] env[62510]: DEBUG oslo_vmware.api [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Task: {'id': task-1769068, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.663827] env[62510]: DEBUG oslo_vmware.api [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52a3012c-46a0-e2a3-2291-ac5ef81f5f37, 'name': SearchDatastore_Task, 'duration_secs': 0.097358} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1744.666801] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1744.667123] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] bc474f8b-dd3b-4d7a-a8e0-fea5570b3091/bc474f8b-dd3b-4d7a-a8e0-fea5570b3091.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1744.667612] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-32d9e70a-73a7-4e23-8c0f-bb66df1509b9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.676904] env[62510]: DEBUG oslo_vmware.api [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1744.676904] env[62510]: value = "task-1769069" [ 1744.676904] env[62510]: _type = "Task" [ 1744.676904] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1744.689724] env[62510]: DEBUG oslo_vmware.api [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769069, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.734426] env[62510]: DEBUG oslo_concurrency.lockutils [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Releasing lock "refresh_cache-8b079310-084b-4ba0-8a82-57d64f421c11" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1744.734794] env[62510]: DEBUG nova.compute.manager [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Instance network_info: |[{"id": "764fd77c-e3bd-42f6-b51b-0a6c9e718b34", "address": "fa:16:3e:5d:b5:6c", "network": {"id": "22bd7136-e6e5-445f-8cd0-6cfe0341410c", "bridge": "br-int", "label": "tempest-ServersTestJSON-2034430291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "94a46473611d4b22be7c66c909d1b348", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap764fd77c-e3", "ovs_interfaceid": "764fd77c-e3bd-42f6-b51b-0a6c9e718b34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1744.735163] env[62510]: DEBUG oslo_concurrency.lockutils [req-619309b0-b26e-450c-bc14-e72c27df8a66 req-56c14117-de92-40cd-bd36-693fc1eed10c service nova] Acquired lock "refresh_cache-8b079310-084b-4ba0-8a82-57d64f421c11" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1744.735380] env[62510]: DEBUG nova.network.neutron [req-619309b0-b26e-450c-bc14-e72c27df8a66 req-56c14117-de92-40cd-bd36-693fc1eed10c service nova] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Refreshing network info cache for port 764fd77c-e3bd-42f6-b51b-0a6c9e718b34 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1744.736963] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5d:b5:6c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89ef02af-c508-432f-ae29-3a219701d584', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '764fd77c-e3bd-42f6-b51b-0a6c9e718b34', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1744.744921] env[62510]: DEBUG oslo.service.loopingcall [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1744.749071] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1744.749574] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-94529896-f7fc-48f4-9af6-cd7dd7e96927 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.776156] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1744.776156] env[62510]: value = "task-1769070" [ 1744.776156] env[62510]: _type = "Task" [ 1744.776156] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1744.788126] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769070, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.850829] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faf833aa-da77-4a8d-a16e-a93ea16bb1ba {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.862554] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae232f5c-f08f-4a90-8587-3d6ef63f38c0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.898893] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af5d2132-9680-44a6-aa2d-5bd9c152139d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.905457] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2292b87f-4e2f-4f1a-8429-f0033bea5efb tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "774ea198-c933-449a-8380-2e4cc9327389" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1744.913636] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aa15117-d251-458d-81b5-c0f206362ce8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.925829] env[62510]: DEBUG oslo_vmware.api [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769066, 'name': MoveVirtualDisk_Task} progress is 40%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.934026] env[62510]: DEBUG nova.compute.provider_tree [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1745.075767] env[62510]: DEBUG oslo_vmware.api [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Task: {'id': task-1769068, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.948741} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1745.076023] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1745.076232] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1745.076380] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1745.076551] env[62510]: INFO nova.compute.manager [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Took 2.81 seconds to destroy the instance on the hypervisor. [ 1745.076838] env[62510]: DEBUG oslo.service.loopingcall [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1745.077052] env[62510]: DEBUG nova.compute.manager [-] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1745.077154] env[62510]: DEBUG nova.network.neutron [-] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1745.196514] env[62510]: DEBUG oslo_vmware.api [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769069, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.287999] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769070, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.422532] env[62510]: DEBUG oslo_vmware.api [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769066, 'name': MoveVirtualDisk_Task} progress is 63%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.438781] env[62510]: DEBUG nova.scheduler.client.report [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1745.693312] env[62510]: DEBUG oslo_vmware.api [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769069, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.790337] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769070, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.918039] env[62510]: DEBUG oslo_vmware.api [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769066, 'name': MoveVirtualDisk_Task} progress is 83%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.946669] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.467s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1745.947361] env[62510]: DEBUG nova.compute.manager [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1745.950344] env[62510]: DEBUG oslo_concurrency.lockutils [None req-fce85173-013c-4180-aa3c-60eca3b7ad6a tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.860s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1745.950667] env[62510]: DEBUG nova.objects.instance [None req-fce85173-013c-4180-aa3c-60eca3b7ad6a tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lazy-loading 'resources' on Instance uuid 7cc6d4a6-2765-44e7-b378-e213a562593d {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1746.009699] env[62510]: DEBUG nova.network.neutron [req-619309b0-b26e-450c-bc14-e72c27df8a66 req-56c14117-de92-40cd-bd36-693fc1eed10c service nova] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Updated VIF entry in instance network info cache for port 764fd77c-e3bd-42f6-b51b-0a6c9e718b34. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1746.010670] env[62510]: DEBUG nova.network.neutron [req-619309b0-b26e-450c-bc14-e72c27df8a66 req-56c14117-de92-40cd-bd36-693fc1eed10c service nova] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Updating instance_info_cache with network_info: [{"id": "764fd77c-e3bd-42f6-b51b-0a6c9e718b34", "address": "fa:16:3e:5d:b5:6c", "network": {"id": "22bd7136-e6e5-445f-8cd0-6cfe0341410c", "bridge": "br-int", "label": "tempest-ServersTestJSON-2034430291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "94a46473611d4b22be7c66c909d1b348", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap764fd77c-e3", "ovs_interfaceid": "764fd77c-e3bd-42f6-b51b-0a6c9e718b34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1746.141902] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2292b87f-4e2f-4f1a-8429-f0033bea5efb tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "774ea198-c933-449a-8380-2e4cc9327389" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1746.142286] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2292b87f-4e2f-4f1a-8429-f0033bea5efb tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "774ea198-c933-449a-8380-2e4cc9327389" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s 
{{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1746.142545] env[62510]: INFO nova.compute.manager [None req-2292b87f-4e2f-4f1a-8429-f0033bea5efb tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Attaching volume 93b8a9cb-15bb-446a-ad4f-c4a8d4e4a365 to /dev/sdb [ 1746.194435] env[62510]: DEBUG oslo_vmware.api [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769069, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1746.206291] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3936ca05-8740-4938-be82-1e0ad63a399e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.214871] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c19b3b5-5773-4bca-94f1-95ea7c1b5457 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.231968] env[62510]: DEBUG nova.virt.block_device [None req-2292b87f-4e2f-4f1a-8429-f0033bea5efb tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Updating existing volume attachment record: 9ac7ec30-9877-4b60-9911-db627f2d0122 {{(pid=62510) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1746.292334] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769070, 'name': CreateVM_Task, 'duration_secs': 1.262735} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1746.292334] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1746.292334] env[62510]: DEBUG oslo_concurrency.lockutils [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1746.292334] env[62510]: DEBUG oslo_concurrency.lockutils [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1746.292861] env[62510]: DEBUG oslo_concurrency.lockutils [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1746.292861] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-154acb3a-d04e-400e-9baa-da2de480c9db {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.299297] env[62510]: DEBUG oslo_vmware.api [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1746.299297] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5242383a-a11f-0b0d-d877-2887d91fbf1b" [ 1746.299297] env[62510]: _type = "Task" [ 1746.299297] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1746.312023] env[62510]: DEBUG oslo_vmware.api [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5242383a-a11f-0b0d-d877-2887d91fbf1b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1746.418905] env[62510]: DEBUG oslo_vmware.api [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769066, 'name': MoveVirtualDisk_Task} progress is 100%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1746.455685] env[62510]: DEBUG nova.compute.utils [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1746.466162] env[62510]: DEBUG nova.compute.manager [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1746.467277] env[62510]: DEBUG nova.network.neutron [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1746.514065] env[62510]: DEBUG oslo_concurrency.lockutils [req-619309b0-b26e-450c-bc14-e72c27df8a66 req-56c14117-de92-40cd-bd36-693fc1eed10c service nova] Releasing lock "refresh_cache-8b079310-084b-4ba0-8a82-57d64f421c11" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1746.597040] env[62510]: DEBUG nova.policy [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '835afe20c2524b6f8118c06e35e556ec', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eb86ec297e4d458a8027a275ced1825f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1746.693983] env[62510]: DEBUG oslo_vmware.api [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769069, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.942072} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1746.695403] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] bc474f8b-dd3b-4d7a-a8e0-fea5570b3091/bc474f8b-dd3b-4d7a-a8e0-fea5570b3091.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1746.695403] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1746.695403] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-389e3953-d714-4114-99f0-f02074d96444 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.705119] env[62510]: DEBUG oslo_vmware.api [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1746.705119] env[62510]: value = "task-1769072" [ 1746.705119] env[62510]: _type = "Task" [ 1746.705119] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1746.720254] env[62510]: DEBUG oslo_vmware.api [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769072, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1746.776346] env[62510]: DEBUG nova.network.neutron [-] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1746.817807] env[62510]: DEBUG oslo_vmware.api [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5242383a-a11f-0b0d-d877-2887d91fbf1b, 'name': SearchDatastore_Task, 'duration_secs': 0.060208} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1746.818510] env[62510]: DEBUG oslo_concurrency.lockutils [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1746.819200] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1746.819200] env[62510]: DEBUG oslo_concurrency.lockutils [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1746.819200] env[62510]: DEBUG oslo_concurrency.lockutils [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1746.819409] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1746.820747] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5e577ed9-09e2-4dc8-b6b7-43b66f52178c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.832696] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1746.832880] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1746.833697] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e6ed54e-a325-4eab-b886-d963a1d35859 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.839936] env[62510]: DEBUG oslo_vmware.api [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1746.839936] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]525c9846-b900-23ad-0e6f-4412a3323479" [ 1746.839936] env[62510]: _type = "Task" [ 1746.839936] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1746.853348] env[62510]: DEBUG oslo_vmware.api [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]525c9846-b900-23ad-0e6f-4412a3323479, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1746.919605] env[62510]: DEBUG oslo_vmware.api [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769066, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.643964} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1746.923078] env[62510]: INFO nova.virt.vmwareapi.ds_util [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_a1107bd8-baab-4030-9530-8f69edd90886/OSTACK_IMG_a1107bd8-baab-4030-9530-8f69edd90886.vmdk to [datastore1] devstack-image-cache_base/f6223ac6-801a-4cf1-b252-449e54e92fe4/f6223ac6-801a-4cf1-b252-449e54e92fe4.vmdk. [ 1746.923462] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Cleaning up location [datastore1] OSTACK_IMG_a1107bd8-baab-4030-9530-8f69edd90886 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1746.923766] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_a1107bd8-baab-4030-9530-8f69edd90886 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1746.924377] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a395ce15-9854-41bb-bc53-d5377f79ad38 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.933103] env[62510]: DEBUG oslo_vmware.api [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1746.933103] env[62510]: value = "task-1769075" [ 1746.933103] env[62510]: _type = "Task" [ 1746.933103] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1746.945819] env[62510]: DEBUG oslo_vmware.api [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769075, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1746.948702] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16a6ec01-4d64-4654-8e7b-5c28653a2503 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.957053] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52dbea82-5c8e-4069-89e4-e95830f527d3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.997104] env[62510]: DEBUG nova.compute.manager [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1747.001969] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d660e7-cc68-4eeb-bcf0-e95c84626c19 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.005986] env[62510]: DEBUG nova.compute.manager [req-51340753-1722-4a91-9eb1-fb4c3087b0e7 req-a66bc46e-1f60-4c20-9c26-71f5707a03a8 service nova] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Received event network-vif-deleted-e1df700d-6a97-4814-9a7d-e381d485b8b4 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1747.015327] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3570c4a0-8ffa-4e3d-ba7a-5eb2b52432e3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.031475] env[62510]: DEBUG nova.compute.provider_tree [None req-fce85173-013c-4180-aa3c-60eca3b7ad6a tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1747.202401] env[62510]: DEBUG nova.network.neutron [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Successfully created port: 9331bfef-61be-4509-82fc-39111423b497 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1747.217313] env[62510]: DEBUG oslo_vmware.api [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769072, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.161752} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1747.217591] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1747.218462] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adbf1700-c9e1-4132-8151-56ba3f30cd06 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.244805] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] bc474f8b-dd3b-4d7a-a8e0-fea5570b3091/bc474f8b-dd3b-4d7a-a8e0-fea5570b3091.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1747.245138] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-98e20657-7308-49f7-9adc-777913e11f05 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.267024] env[62510]: DEBUG oslo_vmware.api [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1747.267024] env[62510]: value = "task-1769076" [ 1747.267024] env[62510]: _type = "Task" [ 1747.267024] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1747.277902] env[62510]: DEBUG oslo_vmware.api [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769076, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1747.279695] env[62510]: INFO nova.compute.manager [-] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Took 2.20 seconds to deallocate network for instance. [ 1747.356403] env[62510]: DEBUG oslo_vmware.api [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]525c9846-b900-23ad-0e6f-4412a3323479, 'name': SearchDatastore_Task, 'duration_secs': 0.011317} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1747.356403] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a90850e0-db96-4e8c-b987-745843022507 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.362805] env[62510]: DEBUG oslo_vmware.api [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1747.362805] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]523b58d2-abfb-5538-a509-d26c93b22cc4" [ 1747.362805] env[62510]: _type = "Task" [ 1747.362805] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1747.372223] env[62510]: DEBUG oslo_vmware.api [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]523b58d2-abfb-5538-a509-d26c93b22cc4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1747.446169] env[62510]: DEBUG oslo_vmware.api [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769075, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.061516} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1747.446462] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1747.446657] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Releasing lock "[datastore1] devstack-image-cache_base/f6223ac6-801a-4cf1-b252-449e54e92fe4/f6223ac6-801a-4cf1-b252-449e54e92fe4.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1747.447529] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/f6223ac6-801a-4cf1-b252-449e54e92fe4/f6223ac6-801a-4cf1-b252-449e54e92fe4.vmdk to [datastore1] 144052ab-e3e7-401f-9edb-d8088780e468/144052ab-e3e7-401f-9edb-d8088780e468.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1747.447529] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4706304b-2987-4a0b-b663-2df9fa29aa82 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.463729] env[62510]: DEBUG oslo_vmware.api [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1747.463729] env[62510]: value = "task-1769077" [ 1747.463729] env[62510]: _type = "Task" [ 1747.463729] 
env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1747.476101] env[62510]: DEBUG oslo_vmware.api [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769077, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1747.534089] env[62510]: DEBUG nova.scheduler.client.report [None req-fce85173-013c-4180-aa3c-60eca3b7ad6a tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1747.591838] env[62510]: INFO nova.compute.manager [None req-cb1c482b-0d53-4fc9-bded-6fab1aee3930 tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Get console output [ 1747.591838] env[62510]: WARNING nova.virt.vmwareapi.driver [None req-cb1c482b-0d53-4fc9-bded-6fab1aee3930 tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] The console log is missing. Check your VSPC configuration [ 1747.785766] env[62510]: DEBUG oslo_vmware.api [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769076, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1747.876213] env[62510]: DEBUG oslo_vmware.api [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]523b58d2-abfb-5538-a509-d26c93b22cc4, 'name': SearchDatastore_Task, 'duration_secs': 0.012713} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1747.878873] env[62510]: DEBUG oslo_concurrency.lockutils [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1747.878873] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 8b079310-084b-4ba0-8a82-57d64f421c11/8b079310-084b-4ba0-8a82-57d64f421c11.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1747.878873] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-65ecaaed-30ea-4538-a44c-954b780f8765 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.887737] env[62510]: DEBUG oslo_vmware.api [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1747.887737] env[62510]: value = "task-1769078" [ 1747.887737] env[62510]: _type = "Task" [ 1747.887737] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1747.901149] env[62510]: DEBUG oslo_vmware.api [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769078, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1747.902443] env[62510]: INFO nova.compute.manager [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Took 0.62 seconds to detach 1 volumes for instance. [ 1747.911021] env[62510]: DEBUG nova.compute.manager [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Deleting volume: b09085d6-32c6-49d0-a8fe-b4eb76d80b62 {{(pid=62510) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1747.979254] env[62510]: DEBUG oslo_vmware.api [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769077, 'name': CopyVirtualDisk_Task} progress is 18%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.011410] env[62510]: DEBUG nova.compute.manager [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1748.039178] env[62510]: DEBUG oslo_concurrency.lockutils [None req-fce85173-013c-4180-aa3c-60eca3b7ad6a tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.089s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1748.041852] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.086s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1748.044011] env[62510]: DEBUG nova.objects.instance [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Lazy-loading 'pci_requests' on Instance uuid e7daad63-c802-4a86-bead-7e849064ed61 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1748.067957] env[62510]: DEBUG nova.virt.hardware [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1748.070023] env[62510]: DEBUG nova.virt.hardware [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1748.070023] env[62510]: DEBUG nova.virt.hardware [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1748.070023] env[62510]: DEBUG nova.virt.hardware [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1748.070023] env[62510]: DEBUG nova.virt.hardware [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Image pref 0:0:0 {{(pid=62510) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1748.070023] env[62510]: DEBUG nova.virt.hardware [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1748.070023] env[62510]: DEBUG nova.virt.hardware [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1748.070023] env[62510]: DEBUG nova.virt.hardware [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1748.070023] env[62510]: DEBUG nova.virt.hardware [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1748.070023] env[62510]: DEBUG nova.virt.hardware [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1748.070023] env[62510]: DEBUG nova.virt.hardware [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1748.071315] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c30ec7f8-a9e0-4f63-8d22-d4b8a7e69630 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.082364] env[62510]: INFO nova.scheduler.client.report [None req-fce85173-013c-4180-aa3c-60eca3b7ad6a tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Deleted allocations for instance 7cc6d4a6-2765-44e7-b378-e213a562593d [ 1748.084607] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b836468-0b40-4299-a75e-30e21d8a9870 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.280229] env[62510]: DEBUG oslo_vmware.api [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769076, 'name': ReconfigVM_Task, 'duration_secs': 0.553476} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1748.280561] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Reconfigured VM instance instance-0000004e to attach disk [datastore1] bc474f8b-dd3b-4d7a-a8e0-fea5570b3091/bc474f8b-dd3b-4d7a-a8e0-fea5570b3091.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1748.281464] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-52647d07-7be2-43ef-9ce3-59cb51f6c03a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.290322] env[62510]: DEBUG oslo_vmware.api [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1748.290322] env[62510]: value = "task-1769080" [ 1748.290322] env[62510]: _type = "Task" [ 1748.290322] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1748.302288] env[62510]: DEBUG oslo_vmware.api [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769080, 'name': Rename_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.404479] env[62510]: DEBUG oslo_vmware.api [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769078, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.484747] env[62510]: DEBUG oslo_vmware.api [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769077, 'name': CopyVirtualDisk_Task} progress is 38%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.516862] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1748.547812] env[62510]: DEBUG nova.objects.instance [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Lazy-loading 'numa_topology' on Instance uuid e7daad63-c802-4a86-bead-7e849064ed61 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1748.613186] env[62510]: DEBUG oslo_concurrency.lockutils [None req-fce85173-013c-4180-aa3c-60eca3b7ad6a tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "7cc6d4a6-2765-44e7-b378-e213a562593d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.542s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1748.802218] env[62510]: DEBUG oslo_vmware.api [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769080, 'name': Rename_Task, 'duration_secs': 0.181651} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1748.802725] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1748.803035] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d131f758-caf6-4b8a-9df8-5fca92630646 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.812010] env[62510]: DEBUG oslo_vmware.api [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1748.812010] env[62510]: value = "task-1769081" [ 1748.812010] env[62510]: _type = "Task" [ 1748.812010] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1748.823565] env[62510]: DEBUG oslo_vmware.api [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769081, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.859964] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1abcf958-0cba-4235-86a3-264e1c5b7d24 tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Acquiring lock "3533a113-6f46-4b18-872d-9bc1b0481969" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1748.859964] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1abcf958-0cba-4235-86a3-264e1c5b7d24 tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Lock "3533a113-6f46-4b18-872d-9bc1b0481969" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1748.860208] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1abcf958-0cba-4235-86a3-264e1c5b7d24 tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Acquiring lock "3533a113-6f46-4b18-872d-9bc1b0481969-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1748.860275] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1abcf958-0cba-4235-86a3-264e1c5b7d24 tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Lock "3533a113-6f46-4b18-872d-9bc1b0481969-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1748.860465] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1abcf958-0cba-4235-86a3-264e1c5b7d24 tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Lock "3533a113-6f46-4b18-872d-9bc1b0481969-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1748.863071] env[62510]: INFO nova.compute.manager [None req-1abcf958-0cba-4235-86a3-264e1c5b7d24 tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Terminating instance [ 1748.900253] env[62510]: DEBUG oslo_vmware.api [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769078, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.975929] env[62510]: DEBUG oslo_vmware.api [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769077, 'name': CopyVirtualDisk_Task} progress is 57%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1749.050902] env[62510]: INFO nova.compute.claims [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1749.131910] env[62510]: DEBUG nova.network.neutron [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Successfully updated port: 9331bfef-61be-4509-82fc-39111423b497 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1749.242255] env[62510]: DEBUG nova.compute.manager [req-f2c0c508-9a02-49e7-ba93-20724b4fb4e1 req-4b6a9f6d-b1fc-439a-a513-212f98d94911 service nova] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Received event network-vif-plugged-9331bfef-61be-4509-82fc-39111423b497 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1749.242508] env[62510]: DEBUG oslo_concurrency.lockutils [req-f2c0c508-9a02-49e7-ba93-20724b4fb4e1 req-4b6a9f6d-b1fc-439a-a513-212f98d94911 service nova] Acquiring lock "91a76cc7-7f82-42cf-a379-fc0ba3d04568-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1749.242716] env[62510]: DEBUG oslo_concurrency.lockutils [req-f2c0c508-9a02-49e7-ba93-20724b4fb4e1 req-4b6a9f6d-b1fc-439a-a513-212f98d94911 service nova] Lock "91a76cc7-7f82-42cf-a379-fc0ba3d04568-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1749.242891] env[62510]: DEBUG oslo_concurrency.lockutils [req-f2c0c508-9a02-49e7-ba93-20724b4fb4e1 req-4b6a9f6d-b1fc-439a-a513-212f98d94911 service nova] Lock "91a76cc7-7f82-42cf-a379-fc0ba3d04568-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1749.243073] env[62510]: DEBUG nova.compute.manager [req-f2c0c508-9a02-49e7-ba93-20724b4fb4e1 req-4b6a9f6d-b1fc-439a-a513-212f98d94911 service nova] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] No waiting events found dispatching network-vif-plugged-9331bfef-61be-4509-82fc-39111423b497 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1749.243245] env[62510]: WARNING nova.compute.manager [req-f2c0c508-9a02-49e7-ba93-20724b4fb4e1 req-4b6a9f6d-b1fc-439a-a513-212f98d94911 service nova] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Received unexpected event network-vif-plugged-9331bfef-61be-4509-82fc-39111423b497 for instance with vm_state building and task_state spawning. [ 1749.326774] env[62510]: DEBUG oslo_vmware.api [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769081, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1749.367963] env[62510]: DEBUG nova.compute.manager [None req-1abcf958-0cba-4235-86a3-264e1c5b7d24 tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1749.368246] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1abcf958-0cba-4235-86a3-264e1c5b7d24 tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1749.369324] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0dbc05c-63a6-4e36-892f-6ad10913ecb6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.379352] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-1abcf958-0cba-4235-86a3-264e1c5b7d24 tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1749.379664] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b6af3921-dbf8-42c6-97eb-5a85edf338e5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.388338] env[62510]: DEBUG oslo_vmware.api [None req-1abcf958-0cba-4235-86a3-264e1c5b7d24 tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Waiting for the task: (returnval){ [ 1749.388338] env[62510]: value = "task-1769083" [ 1749.388338] env[62510]: _type = "Task" [ 1749.388338] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1749.405065] env[62510]: DEBUG oslo_vmware.api [None req-1abcf958-0cba-4235-86a3-264e1c5b7d24 tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': task-1769083, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1749.405394] env[62510]: DEBUG oslo_vmware.api [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769078, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1749.480239] env[62510]: DEBUG oslo_vmware.api [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769077, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1749.643362] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Acquiring lock "refresh_cache-91a76cc7-7f82-42cf-a379-fc0ba3d04568" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1749.643362] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Acquired lock "refresh_cache-91a76cc7-7f82-42cf-a379-fc0ba3d04568" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1749.643362] env[62510]: DEBUG nova.network.neutron [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1749.825786] env[62510]: DEBUG oslo_vmware.api [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769081, 'name': PowerOnVM_Task, 'duration_secs': 0.529707} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1749.825786] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1749.826202] env[62510]: INFO nova.compute.manager [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Took 10.91 seconds to spawn the instance on the hypervisor. [ 1749.826475] env[62510]: DEBUG nova.compute.manager [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1749.831172] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-751b4acb-e13e-4153-96bc-4634344b46b6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.904845] env[62510]: DEBUG oslo_vmware.api [None req-1abcf958-0cba-4235-86a3-264e1c5b7d24 tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': task-1769083, 'name': PowerOffVM_Task, 'duration_secs': 0.324009} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1749.905162] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-1abcf958-0cba-4235-86a3-264e1c5b7d24 tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1749.905337] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1abcf958-0cba-4235-86a3-264e1c5b7d24 tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1749.905631] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9e3662fe-96db-4806-99eb-12213a76e8e9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.911371] env[62510]: DEBUG oslo_vmware.api [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769078, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1749.935603] env[62510]: DEBUG oslo_concurrency.lockutils [None req-b44b1d88-334b-4b50-9d0b-c14503b5f0b7 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquiring lock "e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1749.935603] env[62510]: DEBUG oslo_concurrency.lockutils [None req-b44b1d88-334b-4b50-9d0b-c14503b5f0b7 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1749.935603] env[62510]: DEBUG oslo_concurrency.lockutils [None req-b44b1d88-334b-4b50-9d0b-c14503b5f0b7 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquiring lock "e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1749.935603] env[62510]: DEBUG oslo_concurrency.lockutils [None req-b44b1d88-334b-4b50-9d0b-c14503b5f0b7 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1749.935603] env[62510]: DEBUG oslo_concurrency.lockutils [None req-b44b1d88-334b-4b50-9d0b-c14503b5f0b7 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1749.937694] env[62510]: INFO nova.compute.manager [None req-b44b1d88-334b-4b50-9d0b-c14503b5f0b7 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Terminating instance [ 1749.940856] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59e76838-16c7-48d1-a6df-b9d45ee00548 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.952680] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53c4eef9-8bba-42a0-8844-c9a4704b9467 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.990047] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-009ceec8-5d0c-4eb6-b810-9230e1a54345 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.999393] env[62510]: DEBUG oslo_vmware.api [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769077, 'name': CopyVirtualDisk_Task} progress is 97%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1750.003172] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e1c7912-81a2-499f-9e0b-bab9eb32ddba {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.023113] env[62510]: DEBUG nova.compute.provider_tree [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1750.107861] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1abcf958-0cba-4235-86a3-264e1c5b7d24 tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1750.108177] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1abcf958-0cba-4235-86a3-264e1c5b7d24 tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1750.108443] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-1abcf958-0cba-4235-86a3-264e1c5b7d24 tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Deleting the datastore file [datastore1] 3533a113-6f46-4b18-872d-9bc1b0481969 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1750.108862] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-cc398d07-9b89-4a0a-845d-4647f27beb31 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.121883] env[62510]: DEBUG oslo_vmware.api [None req-1abcf958-0cba-4235-86a3-264e1c5b7d24 tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Waiting for the task: (returnval){ [ 1750.121883] env[62510]: value = "task-1769085" [ 1750.121883] env[62510]: _type = "Task" [ 1750.121883] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1750.133021] env[62510]: DEBUG oslo_vmware.api [None req-1abcf958-0cba-4235-86a3-264e1c5b7d24 tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': task-1769085, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1750.208827] env[62510]: DEBUG nova.network.neutron [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1750.355024] env[62510]: INFO nova.compute.manager [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Took 29.74 seconds to build instance. [ 1750.405546] env[62510]: DEBUG oslo_vmware.api [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769078, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.460972} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1750.406029] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 8b079310-084b-4ba0-8a82-57d64f421c11/8b079310-084b-4ba0-8a82-57d64f421c11.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1750.406029] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1750.410017] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-526c206b-1a0e-4b98-ba76-83ec19bf4a72 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.414653] env[62510]: DEBUG oslo_vmware.api [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1750.414653] env[62510]: value = "task-1769086" [ 1750.414653] env[62510]: _type = "Task" [ 1750.414653] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1750.426899] env[62510]: DEBUG oslo_vmware.api [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769086, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1750.448458] env[62510]: DEBUG nova.compute.manager [None req-b44b1d88-334b-4b50-9d0b-c14503b5f0b7 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1750.448704] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-b44b1d88-334b-4b50-9d0b-c14503b5f0b7 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1750.449652] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b9b98b-6483-4af8-9380-5d72cd4d8b1d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.462206] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-b44b1d88-334b-4b50-9d0b-c14503b5f0b7 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1750.462536] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0efdc590-c6c5-4858-948b-3d75faab9a02 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.470793] env[62510]: DEBUG oslo_vmware.api [None req-b44b1d88-334b-4b50-9d0b-c14503b5f0b7 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1750.470793] env[62510]: value = "task-1769087" [ 1750.470793] env[62510]: _type = "Task" [ 1750.470793] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1750.482824] env[62510]: DEBUG oslo_vmware.api [None req-b44b1d88-334b-4b50-9d0b-c14503b5f0b7 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1769087, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1750.497379] env[62510]: DEBUG oslo_vmware.api [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769077, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.703123} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1750.497813] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/f6223ac6-801a-4cf1-b252-449e54e92fe4/f6223ac6-801a-4cf1-b252-449e54e92fe4.vmdk to [datastore1] 144052ab-e3e7-401f-9edb-d8088780e468/144052ab-e3e7-401f-9edb-d8088780e468.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1750.500487] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cffa3a1-d583-4b9a-980b-6d24023d6c71 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.527770] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] 144052ab-e3e7-401f-9edb-d8088780e468/144052ab-e3e7-401f-9edb-d8088780e468.vmdk or device None with type streamOptimized {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1750.528897] env[62510]: DEBUG nova.scheduler.client.report [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1750.532557] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-696c1b2c-85a1-4dc1-b061-5d2a9fb92645 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.548562] env[62510]: DEBUG nova.network.neutron [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Updating instance_info_cache with network_info: [{"id": "9331bfef-61be-4509-82fc-39111423b497", "address": "fa:16:3e:ec:2e:59", "network": {"id": "4dc68c52-bf74-434c-9d04-c2812539e5bc", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-2126530041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eb86ec297e4d458a8027a275ced1825f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"7d689fd7-f53e-4fd3-80d9-8d6b8fb7a164", "external-id": "nsx-vlan-transportzone-972", "segmentation_id": 972, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9331bfef-61", "ovs_interfaceid": "9331bfef-61be-4509-82fc-39111423b497", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1750.558499] env[62510]: DEBUG oslo_vmware.api [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1750.558499] env[62510]: value = "task-1769088" [ 1750.558499] env[62510]: _type = "Task" [ 1750.558499] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1750.576821] env[62510]: DEBUG oslo_vmware.api [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769088, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1750.632200] env[62510]: DEBUG oslo_vmware.api [None req-1abcf958-0cba-4235-86a3-264e1c5b7d24 tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': task-1769085, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.334633} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1750.632481] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-1abcf958-0cba-4235-86a3-264e1c5b7d24 tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1750.632673] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1abcf958-0cba-4235-86a3-264e1c5b7d24 tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1750.632852] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1abcf958-0cba-4235-86a3-264e1c5b7d24 tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1750.633036] env[62510]: INFO nova.compute.manager [None req-1abcf958-0cba-4235-86a3-264e1c5b7d24 tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Took 1.26 seconds to destroy the instance on the hypervisor. [ 1750.633297] env[62510]: DEBUG oslo.service.loopingcall [None req-1abcf958-0cba-4235-86a3-264e1c5b7d24 tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1750.633497] env[62510]: DEBUG nova.compute.manager [-] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1750.633594] env[62510]: DEBUG nova.network.neutron [-] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1750.857763] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a9502d7-0443-44da-b58f-9f5b6a3f2ff1 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "bc474f8b-dd3b-4d7a-a8e0-fea5570b3091" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.254s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1750.930020] env[62510]: DEBUG oslo_vmware.api [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769086, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071922} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1750.930020] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1750.930020] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70e8f833-0af3-4fb2-91e5-eba2695fb00e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.961845] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] 8b079310-084b-4ba0-8a82-57d64f421c11/8b079310-084b-4ba0-8a82-57d64f421c11.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1750.965023] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-553fa4a8-f5c9-4e84-b845-236ec59a5491 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.988759] env[62510]: DEBUG oslo_vmware.api [None req-b44b1d88-334b-4b50-9d0b-c14503b5f0b7 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1769087, 'name': PowerOffVM_Task, 'duration_secs': 0.400998} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1750.990384] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-b44b1d88-334b-4b50-9d0b-c14503b5f0b7 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1750.990572] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-b44b1d88-334b-4b50-9d0b-c14503b5f0b7 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1750.990880] env[62510]: DEBUG oslo_vmware.api [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1750.990880] env[62510]: value = "task-1769089" [ 1750.990880] env[62510]: _type = "Task" [ 1750.990880] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1750.991084] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e93fa8c8-0fd2-4a43-9e62-f26858731621 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.001736] env[62510]: DEBUG oslo_vmware.api [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769089, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.050906] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.009s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1751.056401] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Releasing lock "refresh_cache-91a76cc7-7f82-42cf-a379-fc0ba3d04568" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1751.056401] env[62510]: DEBUG nova.compute.manager [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Instance network_info: |[{"id": "9331bfef-61be-4509-82fc-39111423b497", "address": "fa:16:3e:ec:2e:59", "network": {"id": "4dc68c52-bf74-434c-9d04-c2812539e5bc", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-2126530041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, 
"tenant_id": "eb86ec297e4d458a8027a275ced1825f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d689fd7-f53e-4fd3-80d9-8d6b8fb7a164", "external-id": "nsx-vlan-transportzone-972", "segmentation_id": 972, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9331bfef-61", "ovs_interfaceid": "9331bfef-61be-4509-82fc-39111423b497", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1751.056401] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 14.077s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1751.056401] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ec:2e:59', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7d689fd7-f53e-4fd3-80d9-8d6b8fb7a164', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9331bfef-61be-4509-82fc-39111423b497', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1751.064281] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Creating folder: Project (eb86ec297e4d458a8027a275ced1825f). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1751.065682] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-70cc92b6-69ea-4f63-a138-628036cd3acf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.078194] env[62510]: DEBUG oslo_vmware.api [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769088, 'name': ReconfigVM_Task, 'duration_secs': 0.343099} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1751.078513] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Reconfigured VM instance instance-0000004d to attach disk [datastore1] 144052ab-e3e7-401f-9edb-d8088780e468/144052ab-e3e7-401f-9edb-d8088780e468.vmdk or device None with type streamOptimized {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1751.079236] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-98c366f1-6c79-41ae-92f9-9ed5a21d4a12 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.083676] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Created folder: Project (eb86ec297e4d458a8027a275ced1825f) in parent group-v367197. [ 1751.083863] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Creating folder: Instances. Parent ref: group-v367405. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1751.084613] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f6e482a5-3447-40f3-9e9d-180efadc277e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.089920] env[62510]: DEBUG oslo_vmware.api [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1751.089920] env[62510]: value = "task-1769092" [ 1751.089920] env[62510]: _type = "Task" [ 1751.089920] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1751.101027] env[62510]: DEBUG oslo_vmware.api [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769092, 'name': Rename_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.102375] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Created folder: Instances in parent group-v367405. [ 1751.102609] env[62510]: DEBUG oslo.service.loopingcall [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1751.102805] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1751.103027] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b4b1d9d6-5409-4629-be83-fa9b594160ee {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.127192] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1751.127192] env[62510]: value = "task-1769094" [ 1751.127192] env[62510]: _type = "Task" [ 1751.127192] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1751.136931] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769094, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.172640] env[62510]: INFO nova.network.neutron [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Updating port 13fb40b1-132b-407d-b6e0-eec141ae88a8 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1751.199138] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-b44b1d88-334b-4b50-9d0b-c14503b5f0b7 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1751.199427] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-b44b1d88-334b-4b50-9d0b-c14503b5f0b7 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1751.199642] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-b44b1d88-334b-4b50-9d0b-c14503b5f0b7 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Deleting the datastore file [datastore1] e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1751.199992] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fffd010b-783a-4a1a-8a1b-ee917f4e19bc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.212820] env[62510]: DEBUG oslo_vmware.api [None req-b44b1d88-334b-4b50-9d0b-c14503b5f0b7 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for the task: (returnval){ [ 1751.212820] env[62510]: value = "task-1769095" [ 1751.212820] env[62510]: _type = "Task" [ 1751.212820] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1751.223693] env[62510]: DEBUG oslo_vmware.api [None req-b44b1d88-334b-4b50-9d0b-c14503b5f0b7 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1769095, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.310980] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-2292b87f-4e2f-4f1a-8429-f0033bea5efb tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Volume attach. Driver type: vmdk {{(pid=62510) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1751.311247] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-2292b87f-4e2f-4f1a-8429-f0033bea5efb tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367404', 'volume_id': '93b8a9cb-15bb-446a-ad4f-c4a8d4e4a365', 'name': 'volume-93b8a9cb-15bb-446a-ad4f-c4a8d4e4a365', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '774ea198-c933-449a-8380-2e4cc9327389', 'attached_at': '', 'detached_at': '', 'volume_id': '93b8a9cb-15bb-446a-ad4f-c4a8d4e4a365', 'serial': '93b8a9cb-15bb-446a-ad4f-c4a8d4e4a365'} {{(pid=62510) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1751.312204] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e27e041-ef6d-4833-8eec-bdaa8fa2e16b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.335915] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-486e27d1-3cd1-49e2-a4bb-dedc6f785853 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.365313] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-2292b87f-4e2f-4f1a-8429-f0033bea5efb tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] volume-93b8a9cb-15bb-446a-ad4f-c4a8d4e4a365/volume-93b8a9cb-15bb-446a-ad4f-c4a8d4e4a365.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1751.365669] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5aacf682-f2a7-424d-a6e3-c7468600e960 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.390825] env[62510]: DEBUG oslo_vmware.api [None req-2292b87f-4e2f-4f1a-8429-f0033bea5efb tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1751.390825] env[62510]: value = "task-1769096" [ 1751.390825] env[62510]: _type = "Task" [ 1751.390825] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1751.402298] env[62510]: DEBUG oslo_vmware.api [None req-2292b87f-4e2f-4f1a-8429-f0033bea5efb tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769096, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.438799] env[62510]: DEBUG nova.compute.manager [req-74bade86-4ab2-40df-bc6a-239415940578 req-b031b56e-316f-4b70-af9a-5cc6b4a4bfa2 service nova] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Received event network-changed-9331bfef-61be-4509-82fc-39111423b497 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1751.439025] env[62510]: DEBUG nova.compute.manager [req-74bade86-4ab2-40df-bc6a-239415940578 req-b031b56e-316f-4b70-af9a-5cc6b4a4bfa2 service nova] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Refreshing instance network info cache due to event network-changed-9331bfef-61be-4509-82fc-39111423b497. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1751.439289] env[62510]: DEBUG oslo_concurrency.lockutils [req-74bade86-4ab2-40df-bc6a-239415940578 req-b031b56e-316f-4b70-af9a-5cc6b4a4bfa2 service nova] Acquiring lock "refresh_cache-91a76cc7-7f82-42cf-a379-fc0ba3d04568" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1751.439440] env[62510]: DEBUG oslo_concurrency.lockutils [req-74bade86-4ab2-40df-bc6a-239415940578 req-b031b56e-316f-4b70-af9a-5cc6b4a4bfa2 service nova] Acquired lock "refresh_cache-91a76cc7-7f82-42cf-a379-fc0ba3d04568" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1751.439607] env[62510]: DEBUG nova.network.neutron [req-74bade86-4ab2-40df-bc6a-239415940578 req-b031b56e-316f-4b70-af9a-5cc6b4a4bfa2 service nova] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Refreshing network info cache for port 9331bfef-61be-4509-82fc-39111423b497 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1751.508552] env[62510]: DEBUG oslo_vmware.api [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769089, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.570378] env[62510]: INFO nova.compute.claims [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1751.606161] env[62510]: DEBUG oslo_vmware.api [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769092, 'name': Rename_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.638384] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769094, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.720481] env[62510]: DEBUG oslo_vmware.api [None req-b44b1d88-334b-4b50-9d0b-c14503b5f0b7 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Task: {'id': task-1769095, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137705} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1751.720592] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-b44b1d88-334b-4b50-9d0b-c14503b5f0b7 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1751.720730] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-b44b1d88-334b-4b50-9d0b-c14503b5f0b7 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1751.720913] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-b44b1d88-334b-4b50-9d0b-c14503b5f0b7 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1751.721138] env[62510]: INFO nova.compute.manager [None req-b44b1d88-334b-4b50-9d0b-c14503b5f0b7 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Took 1.27 seconds to destroy the instance on the hypervisor. [ 1751.721404] env[62510]: DEBUG oslo.service.loopingcall [None req-b44b1d88-334b-4b50-9d0b-c14503b5f0b7 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1751.721603] env[62510]: DEBUG nova.compute.manager [-] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1751.721703] env[62510]: DEBUG nova.network.neutron [-] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1751.901800] env[62510]: DEBUG oslo_vmware.api [None req-2292b87f-4e2f-4f1a-8429-f0033bea5efb tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769096, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.006789] env[62510]: DEBUG oslo_vmware.api [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769089, 'name': ReconfigVM_Task, 'duration_secs': 0.562458} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1752.007201] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Reconfigured VM instance instance-0000004f to attach disk [datastore1] 8b079310-084b-4ba0-8a82-57d64f421c11/8b079310-084b-4ba0-8a82-57d64f421c11.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1752.007889] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8f0da2f9-a4d2-471b-8fa6-af7a097bbdf2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.018726] env[62510]: DEBUG oslo_vmware.api [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1752.018726] env[62510]: value = "task-1769097" [ 1752.018726] env[62510]: _type = "Task" [ 1752.018726] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.028695] env[62510]: DEBUG oslo_vmware.api [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769097, 'name': Rename_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.077709] env[62510]: INFO nova.compute.resource_tracker [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Updating resource usage from migration 25f05b49-1007-47d0-bf52-3f3d8c3c0d9b [ 1752.104359] env[62510]: DEBUG oslo_vmware.api [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769092, 'name': Rename_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.140202] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769094, 'name': CreateVM_Task, 'duration_secs': 0.558939} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1752.140381] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1752.141073] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1752.141245] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1752.141584] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1752.142731] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3a3d483-bd35-4035-9816-492dc5ef78e5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.151932] env[62510]: DEBUG oslo_vmware.api [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Waiting for the task: (returnval){ [ 1752.151932] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52541e39-5b34-bc64-c0f6-3ceb4780e377" [ 1752.151932] env[62510]: _type = "Task" [ 1752.151932] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.162860] env[62510]: DEBUG oslo_vmware.api [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52541e39-5b34-bc64-c0f6-3ceb4780e377, 'name': SearchDatastore_Task, 'duration_secs': 0.010563} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1752.162860] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1752.162860] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1752.162860] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1752.162860] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1752.163229] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1752.165928] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fc58a14c-f100-4c39-b89f-157f6f2a609d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.176062] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1752.176062] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1752.177023] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-411bda91-783f-4b87-b248-7a3868bef53b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.186634] env[62510]: DEBUG oslo_vmware.api [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Waiting for the task: (returnval){ [ 1752.186634] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]528109be-0576-6cc7-8a0d-e29ec94fdab0" [ 1752.186634] env[62510]: _type = "Task" [ 1752.186634] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.198472] env[62510]: DEBUG oslo_vmware.api [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]528109be-0576-6cc7-8a0d-e29ec94fdab0, 'name': SearchDatastore_Task, 'duration_secs': 0.009776} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1752.201203] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f115723c-43b1-4b76-b7d0-57d248782d85 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.210020] env[62510]: DEBUG oslo_vmware.api [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Waiting for the task: (returnval){ [ 1752.210020] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]525cca28-bc73-b376-b9e1-b7ce257cc758" [ 1752.210020] env[62510]: _type = "Task" [ 1752.210020] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.222962] env[62510]: DEBUG oslo_vmware.api [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]525cca28-bc73-b376-b9e1-b7ce257cc758, 'name': SearchDatastore_Task, 'duration_secs': 0.010047} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1752.225133] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1752.225857] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 91a76cc7-7f82-42cf-a379-fc0ba3d04568/91a76cc7-7f82-42cf-a379-fc0ba3d04568.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1752.226368] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-663df9ad-cac5-4d75-809e-96f66ac80911 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.235226] env[62510]: DEBUG oslo_vmware.api [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Waiting for the task: (returnval){ [ 1752.235226] env[62510]: value = "task-1769098" [ 1752.235226] env[62510]: _type = "Task" [ 1752.235226] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.244936] env[62510]: DEBUG oslo_vmware.api [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769098, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.404819] env[62510]: DEBUG oslo_vmware.api [None req-2292b87f-4e2f-4f1a-8429-f0033bea5efb tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769096, 'name': ReconfigVM_Task, 'duration_secs': 0.538713} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1752.405196] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-2292b87f-4e2f-4f1a-8429-f0033bea5efb tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Reconfigured VM instance instance-0000004c to attach disk [datastore1] volume-93b8a9cb-15bb-446a-ad4f-c4a8d4e4a365/volume-93b8a9cb-15bb-446a-ad4f-c4a8d4e4a365.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1752.413307] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-148990af-cf59-45ec-81f6-9e1275bb44a1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.424802] env[62510]: DEBUG nova.network.neutron [req-74bade86-4ab2-40df-bc6a-239415940578 req-b031b56e-316f-4b70-af9a-5cc6b4a4bfa2 service nova] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Updated VIF entry in instance network info cache for port 9331bfef-61be-4509-82fc-39111423b497. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1752.425238] env[62510]: DEBUG nova.network.neutron [req-74bade86-4ab2-40df-bc6a-239415940578 req-b031b56e-316f-4b70-af9a-5cc6b4a4bfa2 service nova] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Updating instance_info_cache with network_info: [{"id": "9331bfef-61be-4509-82fc-39111423b497", "address": "fa:16:3e:ec:2e:59", "network": {"id": "4dc68c52-bf74-434c-9d04-c2812539e5bc", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-2126530041-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eb86ec297e4d458a8027a275ced1825f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d689fd7-f53e-4fd3-80d9-8d6b8fb7a164", "external-id": "nsx-vlan-transportzone-972", "segmentation_id": 972, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9331bfef-61", "ovs_interfaceid": "9331bfef-61be-4509-82fc-39111423b497", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1752.435727] env[62510]: DEBUG oslo_vmware.api [None req-2292b87f-4e2f-4f1a-8429-f0033bea5efb tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1752.435727] env[62510]: value = "task-1769099" [ 1752.435727] env[62510]: _type = "Task" [ 1752.435727] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.452534] env[62510]: DEBUG oslo_vmware.api [None req-2292b87f-4e2f-4f1a-8429-f0033bea5efb tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769099, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.501124] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d826676a-c083-4949-96f7-f2377b096487 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.511551] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9d2c6b4-fc89-49d4-9c4f-3f6108c8d3a0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.559381] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3292bf13-bfeb-44c3-8c47-7275a47bdf27 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.569339] env[62510]: DEBUG oslo_vmware.api [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769097, 'name': Rename_Task, 'duration_secs': 0.169082} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1752.570651] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1752.571120] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-61b27f8d-2c21-46b3-9724-5cf84225e28f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.579467] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7b6c4de-6fff-45fa-84a0-fa94c474351b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.585745] env[62510]: DEBUG oslo_vmware.api [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1752.585745] env[62510]: value = "task-1769100" [ 1752.585745] env[62510]: _type = "Task" [ 1752.585745] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.610198] env[62510]: DEBUG nova.compute.provider_tree [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1752.620296] env[62510]: DEBUG oslo_vmware.api [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769100, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.620296] env[62510]: DEBUG oslo_vmware.api [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769092, 'name': Rename_Task, 'duration_secs': 1.176172} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1752.620296] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1752.620296] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ef70c0b6-f8c6-46ca-b7e4-a034cea6e81f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.629497] env[62510]: DEBUG oslo_vmware.api [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1752.629497] env[62510]: value = "task-1769101" [ 1752.629497] env[62510]: _type = "Task" [ 1752.629497] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.638624] env[62510]: DEBUG oslo_vmware.api [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769101, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.746744] env[62510]: DEBUG oslo_vmware.api [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769098, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.823735] env[62510]: DEBUG nova.compute.manager [req-b8bdc4af-719a-4dc0-9bd2-b907cb2a2485 req-f88319dd-817a-447e-bb47-5cfacba1612d service nova] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Received event network-vif-deleted-36a0ae52-841d-4fba-ab7b-a2c6ca6c6f25 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1752.823804] env[62510]: INFO nova.compute.manager [req-b8bdc4af-719a-4dc0-9bd2-b907cb2a2485 req-f88319dd-817a-447e-bb47-5cfacba1612d service nova] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Neutron deleted interface 36a0ae52-841d-4fba-ab7b-a2c6ca6c6f25; detaching it from the instance and deleting it from the info cache [ 1752.824257] env[62510]: DEBUG nova.network.neutron [req-b8bdc4af-719a-4dc0-9bd2-b907cb2a2485 req-f88319dd-817a-447e-bb47-5cfacba1612d service nova] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1752.838599] env[62510]: DEBUG nova.network.neutron [-] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1752.929653] env[62510]: DEBUG oslo_concurrency.lockutils [req-74bade86-4ab2-40df-bc6a-239415940578 req-b031b56e-316f-4b70-af9a-5cc6b4a4bfa2 service nova] Releasing lock "refresh_cache-91a76cc7-7f82-42cf-a379-fc0ba3d04568" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1752.948020] env[62510]: DEBUG oslo_vmware.api [None req-2292b87f-4e2f-4f1a-8429-f0033bea5efb tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769099, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.070830] env[62510]: DEBUG nova.network.neutron [-] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1753.098503] env[62510]: DEBUG oslo_vmware.api [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769100, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.120456] env[62510]: DEBUG nova.scheduler.client.report [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1753.143572] env[62510]: DEBUG oslo_vmware.api [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769101, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.254740] env[62510]: DEBUG oslo_vmware.api [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769098, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.568264} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1753.254740] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 91a76cc7-7f82-42cf-a379-fc0ba3d04568/91a76cc7-7f82-42cf-a379-fc0ba3d04568.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1753.254740] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1753.254740] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-df43e83f-bf02-46f9-b74f-8d4d9978444f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.262882] env[62510]: DEBUG oslo_vmware.api [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Waiting for the task: (returnval){ [ 1753.262882] env[62510]: value = "task-1769102" [ 1753.262882] env[62510]: _type = "Task" [ 1753.262882] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1753.274941] env[62510]: DEBUG oslo_vmware.api [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769102, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.329015] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f593926b-e449-4214-9dfd-bb4bcc86eb56 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.338608] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da19c0b0-7d20-4019-9a39-d21db3fba180 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.356035] env[62510]: INFO nova.compute.manager [-] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Took 1.63 seconds to deallocate network for instance. [ 1753.388317] env[62510]: DEBUG nova.compute.manager [req-b8bdc4af-719a-4dc0-9bd2-b907cb2a2485 req-f88319dd-817a-447e-bb47-5cfacba1612d service nova] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Detach interface failed, port_id=36a0ae52-841d-4fba-ab7b-a2c6ca6c6f25, reason: Instance e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7 could not be found. {{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1753.450851] env[62510]: DEBUG oslo_vmware.api [None req-2292b87f-4e2f-4f1a-8429-f0033bea5efb tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769099, 'name': ReconfigVM_Task, 'duration_secs': 0.896027} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1753.451986] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-2292b87f-4e2f-4f1a-8429-f0033bea5efb tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367404', 'volume_id': '93b8a9cb-15bb-446a-ad4f-c4a8d4e4a365', 'name': 'volume-93b8a9cb-15bb-446a-ad4f-c4a8d4e4a365', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '774ea198-c933-449a-8380-2e4cc9327389', 'attached_at': '', 'detached_at': '', 'volume_id': '93b8a9cb-15bb-446a-ad4f-c4a8d4e4a365', 'serial': '93b8a9cb-15bb-446a-ad4f-c4a8d4e4a365'} {{(pid=62510) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1753.488091] env[62510]: DEBUG nova.compute.manager [req-35500298-c0cd-4acf-abcf-d902df4e5c5c req-c29d9c4a-08ad-4824-b017-e327e4f9dd09 service nova] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Received event network-vif-deleted-c2d4507d-2fd6-466d-9025-685dbebc79f3 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1753.488225] env[62510]: DEBUG nova.compute.manager [req-35500298-c0cd-4acf-abcf-d902df4e5c5c req-c29d9c4a-08ad-4824-b017-e327e4f9dd09 service nova] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Received event network-changed-4568ba9b-dd3d-4796-bcfc-7bf80545a66b {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1753.488377] env[62510]: DEBUG nova.compute.manager [req-35500298-c0cd-4acf-abcf-d902df4e5c5c req-c29d9c4a-08ad-4824-b017-e327e4f9dd09 service nova] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Refreshing instance network info cache due to event network-changed-4568ba9b-dd3d-4796-bcfc-7bf80545a66b. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1753.488861] env[62510]: DEBUG oslo_concurrency.lockutils [req-35500298-c0cd-4acf-abcf-d902df4e5c5c req-c29d9c4a-08ad-4824-b017-e327e4f9dd09 service nova] Acquiring lock "refresh_cache-bc474f8b-dd3b-4d7a-a8e0-fea5570b3091" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1753.490178] env[62510]: DEBUG oslo_concurrency.lockutils [req-35500298-c0cd-4acf-abcf-d902df4e5c5c req-c29d9c4a-08ad-4824-b017-e327e4f9dd09 service nova] Acquired lock "refresh_cache-bc474f8b-dd3b-4d7a-a8e0-fea5570b3091" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1753.490476] env[62510]: DEBUG nova.network.neutron [req-35500298-c0cd-4acf-abcf-d902df4e5c5c req-c29d9c4a-08ad-4824-b017-e327e4f9dd09 service nova] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Refreshing network info cache for port 4568ba9b-dd3d-4796-bcfc-7bf80545a66b {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1753.573759] env[62510]: INFO nova.compute.manager [-] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Took 2.94 seconds to deallocate network for instance. [ 1753.598756] env[62510]: DEBUG oslo_vmware.api [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769100, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.630401] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.576s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1753.631106] env[62510]: INFO nova.compute.manager [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Migrating [ 1753.642429] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9d31f393-7e58-49a5-a0ce-485bd4160059 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.014s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1753.643164] env[62510]: DEBUG nova.objects.instance [None req-9d31f393-7e58-49a5-a0ce-485bd4160059 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Lazy-loading 'resources' on Instance uuid 2dce738b-9624-4a74-8b8c-042e45b693b0 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1753.667206] env[62510]: DEBUG oslo_vmware.api [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769101, 'name': PowerOnVM_Task, 'duration_secs': 0.552098} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1753.667500] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1753.667698] env[62510]: INFO nova.compute.manager [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Took 17.22 seconds to spawn the instance on the hypervisor. [ 1753.667872] env[62510]: DEBUG nova.compute.manager [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1753.669519] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-add89cd5-d2b1-47c8-95bb-e90c0be106cb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.777613] env[62510]: DEBUG oslo_vmware.api [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769102, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.153516} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1753.777613] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1753.778393] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c3f88a7-6df1-4029-978c-40ba348a20ce {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.808085] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] 91a76cc7-7f82-42cf-a379-fc0ba3d04568/91a76cc7-7f82-42cf-a379-fc0ba3d04568.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1753.808988] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0675e091-e17c-4b29-ac02-8c64b5841650 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.845091] env[62510]: DEBUG oslo_vmware.api [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Waiting for the task: (returnval){ [ 1753.845091] env[62510]: value = "task-1769103" [ 1753.845091] env[62510]: _type = "Task" [ 1753.845091] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1753.855724] env[62510]: DEBUG oslo_vmware.api [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769103, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.866845] env[62510]: DEBUG oslo_concurrency.lockutils [None req-b44b1d88-334b-4b50-9d0b-c14503b5f0b7 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1754.084790] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1abcf958-0cba-4235-86a3-264e1c5b7d24 tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1754.098840] env[62510]: DEBUG oslo_vmware.api [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769100, 'name': PowerOnVM_Task, 'duration_secs': 1.113453} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1754.099355] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1754.099610] env[62510]: INFO nova.compute.manager [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Took 12.58 seconds to spawn the instance on the hypervisor. 
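[editor's note] The recurring "Waiting for the task: (returnval){ value = "task-…" }" and "Task: {'id': task-…} progress is N%." lines above all come from the driver polling a vSphere task object until it reaches a terminal state. Below is a minimal, self-contained sketch of that polling pattern only; it is not the oslo.vmware implementation, and TaskInfo/fetch_task_info are hypothetical stand-ins for the vim TaskInfo object that the real session reads via the PropertyCollector.

    # Sketch of the task-polling loop behind the "progress is N%" /
    # "completed successfully" lines. Hypothetical types; illustration only.
    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:                 # stand-in for the vSphere TaskInfo object
        task_id: str
        name: str
        state: str                  # 'queued' | 'running' | 'success' | 'error'
        progress: int = 0
        error: str | None = None

    def wait_for_task(fetch_task_info, task_id, poll_interval=0.5):
        """Poll a task until it finishes, logging progress along the way.

        fetch_task_info(task_id) -> TaskInfo is assumed to wrap a read of the
        task's 'info' property on the vCenter side.
        """
        started = time.monotonic()
        while True:
            info = fetch_task_info(task_id)
            if info.state in ('queued', 'running'):
                print(f"Task: {{'id': {info.task_id!r}, 'name': {info.name!r}}} "
                      f"progress is {info.progress}%.")
                time.sleep(poll_interval)
                continue
            if info.state == 'success':
                duration = round(time.monotonic() - started, 6)
                print(f"Task: {{'id': {info.task_id!r}, 'name': {info.name!r}, "
                      f"'duration_secs': {duration}}} completed successfully.")
                return info
            raise RuntimeError(
                f"Task {info.task_id} ({info.name}) failed: {info.error}")

In the driver this loop sits behind the session's wait_for_task() helper, which the vmops/volumeops code invokes after submitting each CreateVM_Task, ReconfigVM_Task, Rename_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task and PowerOnVM_Task seen in this section; the 'duration_secs' values in the log are the measured wall-clock time of each task.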
[ 1754.099799] env[62510]: DEBUG nova.compute.manager [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1754.100778] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d64d9f64-dd61-4b6d-b320-3b1f683cfb7a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.163337] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "refresh_cache-77f485ae-9c4c-424e-8bac-6d023e428767" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1754.163337] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquired lock "refresh_cache-77f485ae-9c4c-424e-8bac-6d023e428767" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1754.163337] env[62510]: DEBUG nova.network.neutron [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1754.196345] env[62510]: INFO nova.compute.manager [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Took 39.58 seconds to build instance. [ 1754.360381] env[62510]: DEBUG oslo_vmware.api [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769103, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.476066] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Acquiring lock "refresh_cache-e7daad63-c802-4a86-bead-7e849064ed61" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1754.476694] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Acquired lock "refresh_cache-e7daad63-c802-4a86-bead-7e849064ed61" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1754.477104] env[62510]: DEBUG nova.network.neutron [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1754.539344] env[62510]: DEBUG nova.objects.instance [None req-2292b87f-4e2f-4f1a-8429-f0033bea5efb tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lazy-loading 'flavor' on Instance uuid 774ea198-c933-449a-8380-2e4cc9327389 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1754.594267] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0140589b-a893-4160-bf17-dd99bafc6ebf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.598028] env[62510]: DEBUG nova.network.neutron [req-35500298-c0cd-4acf-abcf-d902df4e5c5c req-c29d9c4a-08ad-4824-b017-e327e4f9dd09 service nova] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Updated VIF entry in instance network info cache for port 4568ba9b-dd3d-4796-bcfc-7bf80545a66b. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1754.598403] env[62510]: DEBUG nova.network.neutron [req-35500298-c0cd-4acf-abcf-d902df4e5c5c req-c29d9c4a-08ad-4824-b017-e327e4f9dd09 service nova] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Updating instance_info_cache with network_info: [{"id": "4568ba9b-dd3d-4796-bcfc-7bf80545a66b", "address": "fa:16:3e:13:17:23", "network": {"id": "9b209a99-520e-436f-be97-fe37ae505518", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1482163995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.238", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86abf24d608d4c438161dc0b8335dea1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4568ba9b-dd", "ovs_interfaceid": "4568ba9b-dd3d-4796-bcfc-7bf80545a66b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1754.606263] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-487ad05d-a37d-4948-afa8-edd035d6b47e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.651134] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b328f56d-cea4-4e2a-b129-bf870e77690d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.653631] env[62510]: INFO nova.compute.manager [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Took 30.70 seconds to build instance. 
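(Editorial note, not part of the captured log.) Two patterns recur throughout the entries above and below: per-instance lock handling reported by oslo_concurrency.lockutils ("Acquiring"/"Acquired"/"Releasing lock refresh_cache-<uuid>"), and oslo_vmware task polling ("Waiting for the task" / "_poll_task ... progress is N%") until a vCenter task such as Rename_Task or ReconfigVM_Task completes. The following is a minimal Python sketch of those two patterns only; lockutils.lock() is the real oslo.concurrency context manager, while poll_vcenter_task and its get_task_progress callable are hypothetical stand-ins, not Nova's or oslo.vmware's actual implementation.

    # Illustrative sketch only -- not Nova or oslo.vmware source code.
    import time

    from oslo_concurrency import lockutils


    def refresh_network_info_cache(instance_uuid):
        # Per-instance lock, mirroring the "Acquiring/Acquired/Releasing lock
        # refresh_cache-<uuid>" entries in this log. The body is a placeholder.
        with lockutils.lock('refresh_cache-' + instance_uuid):
            pass  # rebuild the instance_info_cache while holding the lock


    def poll_vcenter_task(get_task_progress, interval=0.5):
        # Hypothetical poll-until-done loop mirroring the wait_for_task /
        # _poll_task progress entries (e.g. "ReconfigVM_Task ... progress is 14%").
        # get_task_progress is an assumed callable returning (state, percent).
        while True:
            state, percent = get_task_progress()
            print('progress is %d%%' % percent)
            if state in ('success', 'error'):
                return state
            time.sleep(interval)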
[ 1754.661380] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a63f12de-4131-4df4-991f-f48b2a0476d5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.681320] env[62510]: DEBUG nova.compute.provider_tree [None req-9d31f393-7e58-49a5-a0ce-485bd4160059 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1754.699401] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3a92d453-e222-433b-af23-3d69be3fd20b tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "144052ab-e3e7-401f-9edb-d8088780e468" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.094s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1754.857915] env[62510]: DEBUG oslo_vmware.api [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769103, 'name': ReconfigVM_Task, 'duration_secs': 0.93536} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1754.858418] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Reconfigured VM instance instance-00000050 to attach disk [datastore1] 91a76cc7-7f82-42cf-a379-fc0ba3d04568/91a76cc7-7f82-42cf-a379-fc0ba3d04568.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1754.859444] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d1b98b8a-9336-4f53-bc13-bac11f323137 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.867124] env[62510]: DEBUG oslo_vmware.api [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Waiting for the task: (returnval){ [ 1754.867124] env[62510]: value = "task-1769104" [ 1754.867124] env[62510]: _type = "Task" [ 1754.867124] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.878401] env[62510]: DEBUG oslo_vmware.api [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769104, 'name': Rename_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.045318] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2292b87f-4e2f-4f1a-8429-f0033bea5efb tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "774ea198-c933-449a-8380-2e4cc9327389" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.903s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1755.101630] env[62510]: DEBUG oslo_concurrency.lockutils [req-35500298-c0cd-4acf-abcf-d902df4e5c5c req-c29d9c4a-08ad-4824-b017-e327e4f9dd09 service nova] Releasing lock "refresh_cache-bc474f8b-dd3b-4d7a-a8e0-fea5570b3091" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1755.155963] env[62510]: DEBUG oslo_concurrency.lockutils [None req-021df941-b701-4420-a644-6902fec7cf2d tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "8b079310-084b-4ba0-8a82-57d64f421c11" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.231s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1755.186180] env[62510]: DEBUG nova.scheduler.client.report [None req-9d31f393-7e58-49a5-a0ce-485bd4160059 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1755.386947] env[62510]: DEBUG oslo_vmware.api [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769104, 'name': Rename_Task, 'duration_secs': 0.265907} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1755.386947] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1755.386947] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9ab95d58-6270-4568-a636-56994914a793 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.394063] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "774ea198-c933-449a-8380-2e4cc9327389" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1755.394746] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "774ea198-c933-449a-8380-2e4cc9327389" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1755.394746] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "774ea198-c933-449a-8380-2e4cc9327389-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1755.394926] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "774ea198-c933-449a-8380-2e4cc9327389-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1755.395436] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "774ea198-c933-449a-8380-2e4cc9327389-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1755.400513] env[62510]: DEBUG nova.network.neutron [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Updating instance_info_cache with network_info: [{"id": "9174aa7f-56a1-4625-be49-9a7f645e961b", "address": "fa:16:3e:c3:e4:7b", "network": {"id": "bf59f5d9-5154-4120-9edd-03529b552382", "bridge": "br-int", "label": 
"tempest-ServerDiskConfigTestJSON-2003015829-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e144c0bd2d124193a65ad53de8c43039", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9174aa7f-56", "ovs_interfaceid": "9174aa7f-56a1-4625-be49-9a7f645e961b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1755.402308] env[62510]: DEBUG oslo_vmware.api [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Waiting for the task: (returnval){ [ 1755.402308] env[62510]: value = "task-1769105" [ 1755.402308] env[62510]: _type = "Task" [ 1755.402308] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1755.402782] env[62510]: INFO nova.compute.manager [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Terminating instance [ 1755.421509] env[62510]: DEBUG oslo_vmware.api [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769105, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.512556] env[62510]: DEBUG oslo_concurrency.lockutils [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Acquiring lock "dabc046f-10f5-43d8-90f8-507dcb4d0144" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1755.512940] env[62510]: DEBUG oslo_concurrency.lockutils [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Lock "dabc046f-10f5-43d8-90f8-507dcb4d0144" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1755.653020] env[62510]: DEBUG nova.network.neutron [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Updating instance_info_cache with network_info: [{"id": "13fb40b1-132b-407d-b6e0-eec141ae88a8", "address": "fa:16:3e:3b:f9:de", "network": {"id": "259e31c4-74f6-4d58-9f76-c7b34d594473", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1218880601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f85ce3c02964d36a77221ba8235978c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e85cbc56-fee0-41f7-bc70-64f31775ce92", "external-id": "nsx-vlan-transportzone-793", "segmentation_id": 793, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13fb40b1-13", "ovs_interfaceid": "13fb40b1-132b-407d-b6e0-eec141ae88a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1755.692698] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9d31f393-7e58-49a5-a0ce-485bd4160059 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.048s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1755.695029] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.178s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1755.695284] 
env[62510]: DEBUG nova.objects.instance [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Lazy-loading 'resources' on Instance uuid 568ce58c-9ce5-4b40-988f-f31d8e0c376d {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1755.742009] env[62510]: INFO nova.scheduler.client.report [None req-9d31f393-7e58-49a5-a0ce-485bd4160059 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Deleted allocations for instance 2dce738b-9624-4a74-8b8c-042e45b693b0 [ 1755.905836] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Releasing lock "refresh_cache-77f485ae-9c4c-424e-8bac-6d023e428767" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1755.912303] env[62510]: DEBUG nova.compute.manager [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1755.915232] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1755.915232] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-19776cb1-c41d-4565-a943-c1eb5c9854c8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.921880] env[62510]: DEBUG oslo_vmware.api [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769105, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.924905] env[62510]: DEBUG oslo_vmware.api [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1755.924905] env[62510]: value = "task-1769106" [ 1755.924905] env[62510]: _type = "Task" [ 1755.924905] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1755.936849] env[62510]: DEBUG oslo_vmware.api [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769106, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.015703] env[62510]: DEBUG nova.compute.manager [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1756.077667] env[62510]: DEBUG nova.compute.manager [req-aae673f9-9e5c-485d-86a6-50da62eba134 req-5a5cff78-4b85-4011-acf9-0225e345e953 service nova] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Received event network-vif-plugged-13fb40b1-132b-407d-b6e0-eec141ae88a8 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1756.077968] env[62510]: DEBUG oslo_concurrency.lockutils [req-aae673f9-9e5c-485d-86a6-50da62eba134 req-5a5cff78-4b85-4011-acf9-0225e345e953 service nova] Acquiring lock "e7daad63-c802-4a86-bead-7e849064ed61-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1756.081521] env[62510]: DEBUG oslo_concurrency.lockutils [req-aae673f9-9e5c-485d-86a6-50da62eba134 req-5a5cff78-4b85-4011-acf9-0225e345e953 service nova] Lock "e7daad63-c802-4a86-bead-7e849064ed61-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.003s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1756.081986] env[62510]: DEBUG oslo_concurrency.lockutils [req-aae673f9-9e5c-485d-86a6-50da62eba134 req-5a5cff78-4b85-4011-acf9-0225e345e953 service nova] Lock "e7daad63-c802-4a86-bead-7e849064ed61-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1756.081986] env[62510]: DEBUG nova.compute.manager [req-aae673f9-9e5c-485d-86a6-50da62eba134 req-5a5cff78-4b85-4011-acf9-0225e345e953 service nova] [instance: e7daad63-c802-4a86-bead-7e849064ed61] No waiting events found dispatching network-vif-plugged-13fb40b1-132b-407d-b6e0-eec141ae88a8 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1756.082100] env[62510]: WARNING nova.compute.manager [req-aae673f9-9e5c-485d-86a6-50da62eba134 req-5a5cff78-4b85-4011-acf9-0225e345e953 service nova] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Received unexpected event network-vif-plugged-13fb40b1-132b-407d-b6e0-eec141ae88a8 for instance with vm_state shelved_offloaded and task_state spawning. [ 1756.082831] env[62510]: DEBUG nova.compute.manager [req-aae673f9-9e5c-485d-86a6-50da62eba134 req-5a5cff78-4b85-4011-acf9-0225e345e953 service nova] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Received event network-changed-13fb40b1-132b-407d-b6e0-eec141ae88a8 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1756.082831] env[62510]: DEBUG nova.compute.manager [req-aae673f9-9e5c-485d-86a6-50da62eba134 req-5a5cff78-4b85-4011-acf9-0225e345e953 service nova] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Refreshing instance network info cache due to event network-changed-13fb40b1-132b-407d-b6e0-eec141ae88a8. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1756.082831] env[62510]: DEBUG oslo_concurrency.lockutils [req-aae673f9-9e5c-485d-86a6-50da62eba134 req-5a5cff78-4b85-4011-acf9-0225e345e953 service nova] Acquiring lock "refresh_cache-e7daad63-c802-4a86-bead-7e849064ed61" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1756.160956] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Releasing lock "refresh_cache-e7daad63-c802-4a86-bead-7e849064ed61" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1756.163700] env[62510]: DEBUG oslo_concurrency.lockutils [req-aae673f9-9e5c-485d-86a6-50da62eba134 req-5a5cff78-4b85-4011-acf9-0225e345e953 service nova] Acquired lock "refresh_cache-e7daad63-c802-4a86-bead-7e849064ed61" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1756.164022] env[62510]: DEBUG nova.network.neutron [req-aae673f9-9e5c-485d-86a6-50da62eba134 req-5a5cff78-4b85-4011-acf9-0225e345e953 service nova] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Refreshing network info cache for port 13fb40b1-132b-407d-b6e0-eec141ae88a8 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1756.225182] env[62510]: DEBUG nova.virt.hardware [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='5350379ed8bf85eb2f8c6e7de1ac32d4',container_format='bare',created_at=2024-12-11T19:39:47Z,direct_url=,disk_format='vmdk',id=37ce35c0-4c91-45fb-b27b-04201e3f0d27,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-761365525-shelved',owner='3f85ce3c02964d36a77221ba8235978c',properties=ImageMetaProps,protected=,size=31590912,status='active',tags=,updated_at=2024-12-11T19:40:01Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1756.225182] env[62510]: DEBUG nova.virt.hardware [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1756.225182] env[62510]: DEBUG nova.virt.hardware [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1756.225182] env[62510]: DEBUG nova.virt.hardware [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1756.225182] env[62510]: DEBUG nova.virt.hardware 
[None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1756.225182] env[62510]: DEBUG nova.virt.hardware [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1756.225182] env[62510]: DEBUG nova.virt.hardware [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1756.225182] env[62510]: DEBUG nova.virt.hardware [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1756.225182] env[62510]: DEBUG nova.virt.hardware [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1756.225182] env[62510]: DEBUG nova.virt.hardware [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1756.225182] env[62510]: DEBUG nova.virt.hardware [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1756.227376] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbad4aab-4467-4073-b3e2-03bcf2dfe726 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.238225] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8edff1ee-1b06-4fc2-a553-9673d76809fd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.261613] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3b:f9:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e85cbc56-fee0-41f7-bc70-64f31775ce92', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '13fb40b1-132b-407d-b6e0-eec141ae88a8', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1756.271413] env[62510]: DEBUG oslo.service.loopingcall [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1756.272251] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9d31f393-7e58-49a5-a0ce-485bd4160059 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Lock "2dce738b-9624-4a74-8b8c-042e45b693b0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.313s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1756.276960] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1756.278165] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-247f9d3e-9e55-49f4-a6b2-35b81e6f5863 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.304228] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1756.304228] env[62510]: value = "task-1769107" [ 1756.304228] env[62510]: _type = "Task" [ 1756.304228] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.317021] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769107, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.432470] env[62510]: DEBUG oslo_vmware.api [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769105, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.447088] env[62510]: DEBUG oslo_vmware.api [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769106, 'name': PowerOffVM_Task, 'duration_secs': 0.419551} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1756.447501] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1756.447697] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Volume detach. 
Driver type: vmdk {{(pid=62510) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1756.448017] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367404', 'volume_id': '93b8a9cb-15bb-446a-ad4f-c4a8d4e4a365', 'name': 'volume-93b8a9cb-15bb-446a-ad4f-c4a8d4e4a365', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '774ea198-c933-449a-8380-2e4cc9327389', 'attached_at': '', 'detached_at': '', 'volume_id': '93b8a9cb-15bb-446a-ad4f-c4a8d4e4a365', 'serial': '93b8a9cb-15bb-446a-ad4f-c4a8d4e4a365'} {{(pid=62510) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1756.449157] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa8716f8-02b1-497f-82dd-77d6f8446fd5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.479747] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e52e8ce8-16f0-40d2-8f34-2fc4ad372b8a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.491701] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1320fe76-78cb-4ef8-a038-7a6cb63aceab {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.519447] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d7b4ddb-ad7c-4c59-8ae4-fb412de17c07 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.525310] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquiring lock "2f7b02e8-f658-448f-b6e6-9bfa94c74da4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1756.525545] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lock "2f7b02e8-f658-448f-b6e6-9bfa94c74da4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1756.546178] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] The volume has not been displaced from its original location: [datastore1] volume-93b8a9cb-15bb-446a-ad4f-c4a8d4e4a365/volume-93b8a9cb-15bb-446a-ad4f-c4a8d4e4a365.vmdk. No consolidation needed. 
{{(pid=62510) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1756.551648] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Reconfiguring VM instance instance-0000004c to detach disk 2001 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1756.552942] env[62510]: DEBUG oslo_concurrency.lockutils [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1756.556037] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8a1cfdfb-aefa-46f0-8165-5d5a51e7a0df {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.581151] env[62510]: DEBUG oslo_vmware.api [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1756.581151] env[62510]: value = "task-1769108" [ 1756.581151] env[62510]: _type = "Task" [ 1756.581151] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.594273] env[62510]: DEBUG oslo_vmware.api [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769108, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.687192] env[62510]: DEBUG oslo_concurrency.lockutils [None req-be6d7cfe-f9a9-4e32-a95a-84d7307e5eb0 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquiring lock "144052ab-e3e7-401f-9edb-d8088780e468" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1756.687544] env[62510]: DEBUG oslo_concurrency.lockutils [None req-be6d7cfe-f9a9-4e32-a95a-84d7307e5eb0 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "144052ab-e3e7-401f-9edb-d8088780e468" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1756.687805] env[62510]: DEBUG oslo_concurrency.lockutils [None req-be6d7cfe-f9a9-4e32-a95a-84d7307e5eb0 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquiring lock "144052ab-e3e7-401f-9edb-d8088780e468-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1756.688013] env[62510]: DEBUG oslo_concurrency.lockutils [None req-be6d7cfe-f9a9-4e32-a95a-84d7307e5eb0 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "144052ab-e3e7-401f-9edb-d8088780e468-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1756.688342] env[62510]: DEBUG oslo_concurrency.lockutils [None req-be6d7cfe-f9a9-4e32-a95a-84d7307e5eb0 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "144052ab-e3e7-401f-9edb-d8088780e468-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1756.690760] env[62510]: INFO nova.compute.manager [None req-be6d7cfe-f9a9-4e32-a95a-84d7307e5eb0 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Terminating instance [ 1756.769727] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63762344-0a47-411f-930d-03d1478fb3b5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.777257] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c47768d-a826-4d87-b8cc-f555a618fca4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.817336] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcf671ec-492f-49ba-b7dd-da393eea9c57 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.826274] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769107, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.829609] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-245850cf-9d98-47b7-8bd8-eebefcedc7a1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.845632] env[62510]: DEBUG nova.compute.provider_tree [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1756.923661] env[62510]: DEBUG oslo_vmware.api [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769105, 'name': PowerOnVM_Task, 'duration_secs': 1.249754} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1756.924075] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1756.924405] env[62510]: INFO nova.compute.manager [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Took 8.91 seconds to spawn the instance on the hypervisor. [ 1756.924682] env[62510]: DEBUG nova.compute.manager [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1756.926314] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f5e8f80-bcfe-4245-beb0-f0e715551985 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.033306] env[62510]: DEBUG nova.compute.manager [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1757.097881] env[62510]: DEBUG oslo_vmware.api [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769108, 'name': ReconfigVM_Task, 'duration_secs': 0.405273} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.098274] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Reconfigured VM instance instance-0000004c to detach disk 2001 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1757.105074] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-adc38c22-c52b-42f4-bfe4-307c0e2585bd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.128041] env[62510]: DEBUG oslo_vmware.api [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1757.128041] env[62510]: value = "task-1769109" [ 1757.128041] env[62510]: _type = "Task" [ 1757.128041] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1757.139177] env[62510]: DEBUG oslo_vmware.api [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769109, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.140103] env[62510]: DEBUG nova.network.neutron [req-aae673f9-9e5c-485d-86a6-50da62eba134 req-5a5cff78-4b85-4011-acf9-0225e345e953 service nova] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Updated VIF entry in instance network info cache for port 13fb40b1-132b-407d-b6e0-eec141ae88a8. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1757.140103] env[62510]: DEBUG nova.network.neutron [req-aae673f9-9e5c-485d-86a6-50da62eba134 req-5a5cff78-4b85-4011-acf9-0225e345e953 service nova] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Updating instance_info_cache with network_info: [{"id": "13fb40b1-132b-407d-b6e0-eec141ae88a8", "address": "fa:16:3e:3b:f9:de", "network": {"id": "259e31c4-74f6-4d58-9f76-c7b34d594473", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1218880601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f85ce3c02964d36a77221ba8235978c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e85cbc56-fee0-41f7-bc70-64f31775ce92", "external-id": "nsx-vlan-transportzone-793", "segmentation_id": 793, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13fb40b1-13", "ovs_interfaceid": "13fb40b1-132b-407d-b6e0-eec141ae88a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1757.202064] env[62510]: DEBUG nova.compute.manager [None req-be6d7cfe-f9a9-4e32-a95a-84d7307e5eb0 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1757.202064] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-be6d7cfe-f9a9-4e32-a95a-84d7307e5eb0 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1757.202064] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ee6898e-2e03-4677-96c4-0a77b69227a2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.211224] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-be6d7cfe-f9a9-4e32-a95a-84d7307e5eb0 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1757.211851] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f8785aa0-c80c-4a94-b999-863a6576b7e1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.222428] env[62510]: DEBUG oslo_vmware.api [None req-be6d7cfe-f9a9-4e32-a95a-84d7307e5eb0 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1757.222428] env[62510]: value = "task-1769110" [ 1757.222428] env[62510]: _type = "Task" [ 1757.222428] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1757.235557] env[62510]: DEBUG oslo_vmware.api [None req-be6d7cfe-f9a9-4e32-a95a-84d7307e5eb0 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769110, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.328377] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769107, 'name': CreateVM_Task, 'duration_secs': 0.749194} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.328653] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1757.329650] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/37ce35c0-4c91-45fb-b27b-04201e3f0d27" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1757.330142] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Acquired lock "[datastore1] devstack-image-cache_base/37ce35c0-4c91-45fb-b27b-04201e3f0d27" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1757.330628] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/37ce35c0-4c91-45fb-b27b-04201e3f0d27" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1757.331419] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf2d4c7e-2e94-4dda-9376-380feaae8da2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.340207] env[62510]: DEBUG oslo_vmware.api [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for the task: (returnval){ [ 1757.340207] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52423208-1cc0-42ee-bdf1-067b1ec378a3" [ 1757.340207] env[62510]: _type = "Task" [ 1757.340207] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1757.351132] env[62510]: DEBUG nova.scheduler.client.report [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1757.357386] env[62510]: DEBUG oslo_vmware.api [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52423208-1cc0-42ee-bdf1-067b1ec378a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.430041] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf8ab2a9-d589-4526-989e-4e243fb40b3c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.446145] env[62510]: INFO nova.compute.manager [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Took 26.64 seconds to build instance. [ 1757.464283] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Updating instance '77f485ae-9c4c-424e-8bac-6d023e428767' progress to 0 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1757.565543] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1757.643854] env[62510]: DEBUG oslo_concurrency.lockutils [req-aae673f9-9e5c-485d-86a6-50da62eba134 req-5a5cff78-4b85-4011-acf9-0225e345e953 service nova] Releasing lock "refresh_cache-e7daad63-c802-4a86-bead-7e849064ed61" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1757.644351] env[62510]: DEBUG oslo_vmware.api [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769109, 'name': ReconfigVM_Task, 'duration_secs': 0.24081} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.644681] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367404', 'volume_id': '93b8a9cb-15bb-446a-ad4f-c4a8d4e4a365', 'name': 'volume-93b8a9cb-15bb-446a-ad4f-c4a8d4e4a365', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '774ea198-c933-449a-8380-2e4cc9327389', 'attached_at': '', 'detached_at': '', 'volume_id': '93b8a9cb-15bb-446a-ad4f-c4a8d4e4a365', 'serial': '93b8a9cb-15bb-446a-ad4f-c4a8d4e4a365'} {{(pid=62510) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1757.645159] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1757.645866] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48afaa67-7096-4647-8eec-104b01383c39 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.655698] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1757.655820] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c7c12361-777a-49d1-9bf9-ec2a682f9f7b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.733243] env[62510]: DEBUG oslo_vmware.api [None req-be6d7cfe-f9a9-4e32-a95a-84d7307e5eb0 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769110, 'name': PowerOffVM_Task, 'duration_secs': 0.252843} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.733377] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-be6d7cfe-f9a9-4e32-a95a-84d7307e5eb0 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1757.733475] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-be6d7cfe-f9a9-4e32-a95a-84d7307e5eb0 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1757.733721] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7568ab70-28f5-40fb-a1cc-7692ffd43089 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.742222] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquiring lock "8a230335-6388-45fb-a29e-9e63ddb4d5f2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1757.742449] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lock "8a230335-6388-45fb-a29e-9e63ddb4d5f2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1757.815842] env[62510]: DEBUG oslo_concurrency.lockutils [None req-58a00e9d-c0aa-454a-b7a9-09b96b730702 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "8b079310-084b-4ba0-8a82-57d64f421c11" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1757.816155] env[62510]: DEBUG oslo_concurrency.lockutils [None req-58a00e9d-c0aa-454a-b7a9-09b96b730702 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "8b079310-084b-4ba0-8a82-57d64f421c11" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1757.816486] env[62510]: DEBUG oslo_concurrency.lockutils [None req-58a00e9d-c0aa-454a-b7a9-09b96b730702 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "8b079310-084b-4ba0-8a82-57d64f421c11-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1757.816839] env[62510]: DEBUG oslo_concurrency.lockutils [None req-58a00e9d-c0aa-454a-b7a9-09b96b730702 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock 
"8b079310-084b-4ba0-8a82-57d64f421c11-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1757.817147] env[62510]: DEBUG oslo_concurrency.lockutils [None req-58a00e9d-c0aa-454a-b7a9-09b96b730702 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "8b079310-084b-4ba0-8a82-57d64f421c11-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1757.821354] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1757.821354] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1757.821354] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Deleting the datastore file [datastore1] 774ea198-c933-449a-8380-2e4cc9327389 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1757.821354] env[62510]: INFO nova.compute.manager [None req-58a00e9d-c0aa-454a-b7a9-09b96b730702 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Terminating instance [ 1757.822887] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b5af638a-ba2e-4802-9e1b-d4b6a21131e0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.831197] env[62510]: DEBUG oslo_vmware.api [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1757.831197] env[62510]: value = "task-1769113" [ 1757.831197] env[62510]: _type = "Task" [ 1757.831197] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1757.849575] env[62510]: DEBUG oslo_vmware.api [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769113, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.851072] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-be6d7cfe-f9a9-4e32-a95a-84d7307e5eb0 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1757.851072] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-be6d7cfe-f9a9-4e32-a95a-84d7307e5eb0 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1757.851072] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-be6d7cfe-f9a9-4e32-a95a-84d7307e5eb0 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Deleting the datastore file [datastore1] 144052ab-e3e7-401f-9edb-d8088780e468 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1757.851072] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-af47e9f8-b7af-4664-9b4d-4a56e69599c4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.858225] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Releasing lock "[datastore1] devstack-image-cache_base/37ce35c0-4c91-45fb-b27b-04201e3f0d27" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1757.859140] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Processing image 37ce35c0-4c91-45fb-b27b-04201e3f0d27 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1757.859140] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/37ce35c0-4c91-45fb-b27b-04201e3f0d27/37ce35c0-4c91-45fb-b27b-04201e3f0d27.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1757.859140] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Acquired lock "[datastore1] devstack-image-cache_base/37ce35c0-4c91-45fb-b27b-04201e3f0d27/37ce35c0-4c91-45fb-b27b-04201e3f0d27.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1757.859316] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1757.860116] env[62510]: DEBUG oslo_concurrency.lockutils [None 
req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.165s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1757.862193] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dd2f40d0-d617-4a0e-b539-58960053acc5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.865994] env[62510]: DEBUG oslo_concurrency.lockutils [None req-b44b1d88-334b-4b50-9d0b-c14503b5f0b7 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1757.866400] env[62510]: DEBUG nova.objects.instance [None req-b44b1d88-334b-4b50-9d0b-c14503b5f0b7 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lazy-loading 'resources' on Instance uuid e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1757.868247] env[62510]: DEBUG oslo_vmware.api [None req-be6d7cfe-f9a9-4e32-a95a-84d7307e5eb0 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1757.868247] env[62510]: value = "task-1769114" [ 1757.868247] env[62510]: _type = "Task" [ 1757.868247] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1757.879070] env[62510]: DEBUG oslo_vmware.api [None req-be6d7cfe-f9a9-4e32-a95a-84d7307e5eb0 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769114, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.885420] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1757.885718] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1757.887574] env[62510]: INFO nova.scheduler.client.report [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Deleted allocations for instance 568ce58c-9ce5-4b40-988f-f31d8e0c376d [ 1757.888684] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76c3696a-5dad-415b-845f-3ba7c1ace7a2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.907110] env[62510]: DEBUG oslo_vmware.api [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for the task: (returnval){ [ 1757.907110] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5246765a-7665-f45c-6c41-5a0aaa12f35f" [ 1757.907110] env[62510]: _type = "Task" [ 1757.907110] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1757.923616] env[62510]: DEBUG oslo_vmware.api [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5246765a-7665-f45c-6c41-5a0aaa12f35f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.948599] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8f994d79-29c7-4da9-814a-198d420098a8 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Lock "91a76cc7-7f82-42cf-a379-fc0ba3d04568" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.158s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1757.970785] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1757.971168] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-38b87595-55a4-4881-9273-34d44951989d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.979439] env[62510]: DEBUG oslo_vmware.api [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1757.979439] env[62510]: value = "task-1769115" [ 1757.979439] env[62510]: _type = "Task" [ 1757.979439] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1757.989797] env[62510]: DEBUG oslo_vmware.api [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769115, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.245198] env[62510]: DEBUG nova.compute.manager [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1758.330360] env[62510]: DEBUG nova.compute.manager [None req-58a00e9d-c0aa-454a-b7a9-09b96b730702 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1758.330722] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-58a00e9d-c0aa-454a-b7a9-09b96b730702 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1758.331497] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04a4bbd2-b0ce-44f4-a180-8a905c2553a6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.343740] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-58a00e9d-c0aa-454a-b7a9-09b96b730702 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1758.346966] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a063dc21-098f-4fa2-ac0d-4b54cc66c783 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.348860] env[62510]: DEBUG oslo_vmware.api [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769113, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.172473} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1758.349168] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1758.349414] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1758.349628] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1758.349915] env[62510]: INFO nova.compute.manager [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Took 2.44 seconds to destroy the instance on the hypervisor. [ 1758.350125] env[62510]: DEBUG oslo.service.loopingcall [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1758.350709] env[62510]: DEBUG nova.compute.manager [-] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1758.350833] env[62510]: DEBUG nova.network.neutron [-] [instance: 774ea198-c933-449a-8380-2e4cc9327389] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1758.357322] env[62510]: DEBUG oslo_vmware.api [None req-58a00e9d-c0aa-454a-b7a9-09b96b730702 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1758.357322] env[62510]: value = "task-1769116" [ 1758.357322] env[62510]: _type = "Task" [ 1758.357322] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1758.368861] env[62510]: DEBUG oslo_vmware.api [None req-58a00e9d-c0aa-454a-b7a9-09b96b730702 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769116, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.380423] env[62510]: DEBUG oslo_vmware.api [None req-be6d7cfe-f9a9-4e32-a95a-84d7307e5eb0 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769114, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.182635} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1758.380681] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-be6d7cfe-f9a9-4e32-a95a-84d7307e5eb0 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1758.380873] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-be6d7cfe-f9a9-4e32-a95a-84d7307e5eb0 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1758.381060] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-be6d7cfe-f9a9-4e32-a95a-84d7307e5eb0 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1758.381242] env[62510]: INFO nova.compute.manager [None req-be6d7cfe-f9a9-4e32-a95a-84d7307e5eb0 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1758.381484] env[62510]: DEBUG oslo.service.loopingcall [None req-be6d7cfe-f9a9-4e32-a95a-84d7307e5eb0 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1758.381671] env[62510]: DEBUG nova.compute.manager [-] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1758.381767] env[62510]: DEBUG nova.network.neutron [-] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1758.400964] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0908cdd1-c9d3-4214-a381-feec552ed30e tempest-ServersTestBootFromVolume-1003948680 tempest-ServersTestBootFromVolume-1003948680-project-member] Lock "568ce58c-9ce5-4b40-988f-f31d8e0c376d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.645s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1758.421733] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Preparing fetch location {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1758.422159] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Fetch image to [datastore1] OSTACK_IMG_fa537fb8-8631-4d71-9e97-8ccd10e4d8f0/OSTACK_IMG_fa537fb8-8631-4d71-9e97-8ccd10e4d8f0.vmdk {{(pid=62510) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1758.422416] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Downloading stream optimized image 37ce35c0-4c91-45fb-b27b-04201e3f0d27 to [datastore1] OSTACK_IMG_fa537fb8-8631-4d71-9e97-8ccd10e4d8f0/OSTACK_IMG_fa537fb8-8631-4d71-9e97-8ccd10e4d8f0.vmdk on the data store datastore1 as vApp {{(pid=62510) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1758.422669] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Downloading image file data 37ce35c0-4c91-45fb-b27b-04201e3f0d27 to the ESX as VM named 'OSTACK_IMG_fa537fb8-8631-4d71-9e97-8ccd10e4d8f0' {{(pid=62510) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1758.496287] env[62510]: DEBUG oslo_vmware.api [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769115, 'name': PowerOffVM_Task, 'duration_secs': 0.210251} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1758.497821] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1758.498070] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Updating instance '77f485ae-9c4c-424e-8bac-6d023e428767' progress to 17 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1758.549209] env[62510]: DEBUG oslo_vmware.rw_handles [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1758.549209] env[62510]: value = "resgroup-9" [ 1758.549209] env[62510]: _type = "ResourcePool" [ 1758.549209] env[62510]: }. 
{{(pid=62510) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1758.550417] env[62510]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-af24d164-7aa4-406a-8a8e-1c29450a0cdb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.582700] env[62510]: DEBUG oslo_vmware.rw_handles [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Lease: (returnval){ [ 1758.582700] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5273bd0f-f8f0-18a0-c30a-487886849e95" [ 1758.582700] env[62510]: _type = "HttpNfcLease" [ 1758.582700] env[62510]: } obtained for vApp import into resource pool (val){ [ 1758.582700] env[62510]: value = "resgroup-9" [ 1758.582700] env[62510]: _type = "ResourcePool" [ 1758.582700] env[62510]: }. {{(pid=62510) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1758.583249] env[62510]: DEBUG oslo_vmware.api [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for the lease: (returnval){ [ 1758.583249] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5273bd0f-f8f0-18a0-c30a-487886849e95" [ 1758.583249] env[62510]: _type = "HttpNfcLease" [ 1758.583249] env[62510]: } to be ready. {{(pid=62510) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1758.600165] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1758.600165] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5273bd0f-f8f0-18a0-c30a-487886849e95" [ 1758.600165] env[62510]: _type = "HttpNfcLease" [ 1758.600165] env[62510]: } is initializing. 
{{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1758.690802] env[62510]: DEBUG oslo_concurrency.lockutils [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Acquiring lock "9373089f-dbd4-4ac9-8736-e4c929fe6fb0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1758.691251] env[62510]: DEBUG oslo_concurrency.lockutils [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Lock "9373089f-dbd4-4ac9-8736-e4c929fe6fb0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1758.768631] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1758.815946] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adfe35bb-2ea7-42d4-a4a8-eb346e09d898 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.825854] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80e27abe-6f4b-48cc-a9e5-31daf31069e0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.863597] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e3fa70c-355c-4336-a2cb-ce5efeb6a5b4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.874958] env[62510]: DEBUG oslo_vmware.api [None req-58a00e9d-c0aa-454a-b7a9-09b96b730702 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769116, 'name': PowerOffVM_Task, 'duration_secs': 0.264975} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1758.875435] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-58a00e9d-c0aa-454a-b7a9-09b96b730702 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1758.875750] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-58a00e9d-c0aa-454a-b7a9-09b96b730702 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1758.877161] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d73e9db3-a7ec-4471-a072-6b17e0616b4d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.881320] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3b7ab22d-04a4-40e6-a410-105b30e55aec {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.895373] env[62510]: DEBUG nova.compute.provider_tree [None req-b44b1d88-334b-4b50-9d0b-c14503b5f0b7 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1759.008288] env[62510]: DEBUG nova.virt.hardware [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:41Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1759.008288] env[62510]: DEBUG nova.virt.hardware [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1759.008288] env[62510]: DEBUG nova.virt.hardware [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1759.008288] env[62510]: DEBUG nova.virt.hardware [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 
1759.008288] env[62510]: DEBUG nova.virt.hardware [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1759.008288] env[62510]: DEBUG nova.virt.hardware [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1759.008730] env[62510]: DEBUG nova.virt.hardware [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1759.009055] env[62510]: DEBUG nova.virt.hardware [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1759.009397] env[62510]: DEBUG nova.virt.hardware [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1759.009695] env[62510]: DEBUG nova.virt.hardware [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1759.010097] env[62510]: DEBUG nova.virt.hardware [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1759.015478] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1bbf3b4a-c380-4674-add4-e13d3875c507 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.043460] env[62510]: DEBUG nova.compute.manager [req-100dfeda-76b5-4479-99bb-c726dfc58a1e req-4972b363-036e-4dcc-aa74-a07034549d69 service nova] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Received event network-vif-deleted-94db8ca3-7767-44f8-8cb6-bc0a7b18b7ec {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1759.043460] env[62510]: INFO nova.compute.manager [req-100dfeda-76b5-4479-99bb-c726dfc58a1e req-4972b363-036e-4dcc-aa74-a07034549d69 service nova] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Neutron deleted interface 94db8ca3-7767-44f8-8cb6-bc0a7b18b7ec; detaching it from the instance and deleting it from the info cache [ 1759.043460] env[62510]: DEBUG nova.network.neutron [req-100dfeda-76b5-4479-99bb-c726dfc58a1e req-4972b363-036e-4dcc-aa74-a07034549d69 
service nova] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1759.044537] env[62510]: DEBUG oslo_vmware.api [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1759.044537] env[62510]: value = "task-1769119" [ 1759.044537] env[62510]: _type = "Task" [ 1759.044537] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1759.053444] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-58a00e9d-c0aa-454a-b7a9-09b96b730702 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1759.053444] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-58a00e9d-c0aa-454a-b7a9-09b96b730702 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1759.053444] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-58a00e9d-c0aa-454a-b7a9-09b96b730702 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Deleting the datastore file [datastore1] 8b079310-084b-4ba0-8a82-57d64f421c11 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1759.053444] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d2aa68a9-2d91-4e3d-9e22-8c5760012437 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.064737] env[62510]: DEBUG oslo_vmware.api [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769119, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.067240] env[62510]: DEBUG oslo_vmware.api [None req-58a00e9d-c0aa-454a-b7a9-09b96b730702 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1759.067240] env[62510]: value = "task-1769120" [ 1759.067240] env[62510]: _type = "Task" [ 1759.067240] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1759.080601] env[62510]: DEBUG nova.compute.manager [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1759.081119] env[62510]: DEBUG oslo_vmware.api [None req-58a00e9d-c0aa-454a-b7a9-09b96b730702 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769120, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.082566] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85673b4a-9680-428e-9091-fa2418ad7f56 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.097389] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1759.097389] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5273bd0f-f8f0-18a0-c30a-487886849e95" [ 1759.097389] env[62510]: _type = "HttpNfcLease" [ 1759.097389] env[62510]: } is initializing. {{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1759.195028] env[62510]: DEBUG nova.compute.manager [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1759.399443] env[62510]: DEBUG nova.scheduler.client.report [None req-b44b1d88-334b-4b50-9d0b-c14503b5f0b7 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1759.448746] env[62510]: DEBUG nova.network.neutron [-] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1759.544541] env[62510]: DEBUG nova.network.neutron [-] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1759.546430] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3b274c2c-0ac7-4fe9-9b77-c8e89cc2360d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.557236] env[62510]: DEBUG oslo_vmware.api [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769119, 'name': ReconfigVM_Task, 'duration_secs': 0.213536} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1759.558920] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Updating instance '77f485ae-9c4c-424e-8bac-6d023e428767' progress to 33 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1759.565116] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f540e00b-f6eb-4e00-91d3-19bafb1e9a07 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.592990] env[62510]: DEBUG oslo_vmware.api [None req-58a00e9d-c0aa-454a-b7a9-09b96b730702 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769120, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174834} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1759.605598] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-58a00e9d-c0aa-454a-b7a9-09b96b730702 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1759.605744] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-58a00e9d-c0aa-454a-b7a9-09b96b730702 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1759.605927] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-58a00e9d-c0aa-454a-b7a9-09b96b730702 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1759.606120] env[62510]: INFO nova.compute.manager [None req-58a00e9d-c0aa-454a-b7a9-09b96b730702 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Took 1.28 seconds to destroy the instance on the hypervisor. [ 1759.606365] env[62510]: DEBUG oslo.service.loopingcall [None req-58a00e9d-c0aa-454a-b7a9-09b96b730702 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1759.607326] env[62510]: INFO nova.compute.manager [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] instance snapshotting [ 1759.608870] env[62510]: DEBUG nova.compute.manager [req-100dfeda-76b5-4479-99bb-c726dfc58a1e req-4972b363-036e-4dcc-aa74-a07034549d69 service nova] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Detach interface failed, port_id=94db8ca3-7767-44f8-8cb6-bc0a7b18b7ec, reason: Instance 774ea198-c933-449a-8380-2e4cc9327389 could not be found. 
{{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1759.609263] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1759.609263] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5273bd0f-f8f0-18a0-c30a-487886849e95" [ 1759.609263] env[62510]: _type = "HttpNfcLease" [ 1759.609263] env[62510]: } is ready. {{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1759.610253] env[62510]: DEBUG nova.compute.manager [-] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1759.610357] env[62510]: DEBUG nova.network.neutron [-] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1759.611918] env[62510]: DEBUG oslo_vmware.rw_handles [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1759.611918] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5273bd0f-f8f0-18a0-c30a-487886849e95" [ 1759.611918] env[62510]: _type = "HttpNfcLease" [ 1759.611918] env[62510]: }. {{(pid=62510) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1759.612624] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5052ce32-6d3a-4885-bdd0-ab0494717e06 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.615648] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fe302a0-e873-48fe-a6e0-a3848c642796 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.625532] env[62510]: DEBUG oslo_vmware.rw_handles [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52aee33d-04f0-96c1-b3d4-a3354d24c703/disk-0.vmdk from lease info. {{(pid=62510) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1759.625709] env[62510]: DEBUG oslo_vmware.rw_handles [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Creating HTTP connection to write to file with size = 31590912 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52aee33d-04f0-96c1-b3d4-a3354d24c703/disk-0.vmdk. 
{{(pid=62510) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1759.640819] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-594c38d7-1ee3-4d87-8562-0338b4ebb94c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.711260] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-791dce4a-7f33-416e-9b68-e832f2a639f8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.726625] env[62510]: DEBUG oslo_concurrency.lockutils [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1759.911545] env[62510]: DEBUG oslo_concurrency.lockutils [None req-b44b1d88-334b-4b50-9d0b-c14503b5f0b7 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.045s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1759.915953] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1abcf958-0cba-4235-86a3-264e1c5b7d24 tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.831s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1759.915953] env[62510]: DEBUG nova.objects.instance [None req-1abcf958-0cba-4235-86a3-264e1c5b7d24 tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Lazy-loading 'resources' on Instance uuid 3533a113-6f46-4b18-872d-9bc1b0481969 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1759.945294] env[62510]: INFO nova.scheduler.client.report [None req-b44b1d88-334b-4b50-9d0b-c14503b5f0b7 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Deleted allocations for instance e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7 [ 1759.952861] env[62510]: INFO nova.compute.manager [-] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Took 1.60 seconds to deallocate network for instance. [ 1760.047126] env[62510]: INFO nova.compute.manager [-] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Took 1.67 seconds to deallocate network for instance. 
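The task lifecycle that dominates these entries — an "Invoking <something>_Task" request, a "Waiting for the task ... to complete" block, repeated "progress is N%" polls, then "completed successfully" — is the standard oslo.vmware session pattern. Below is a minimal sketch of that pattern, not the Nova code referenced in the log; the helper names, host, and credentials are placeholders.

    # Sketch of the oslo.vmware task-wait pattern behind the "Waiting for the
    # task", "progress is N%", and "completed successfully" lines above.
    # Host, credentials, and helper names are placeholders, not Nova source.
    from oslo_vmware import api

    def make_session():
        # create_session=False avoids contacting vCenter at construction time;
        # real callers let the session log in (as in the HEAD of this log).
        return api.VMwareAPISession('vc.example.org', 'svc-user', 'secret',
                                    api_retry_count=10, task_poll_interval=0.5,
                                    create_session=False)

    def power_off_vm(session, vm_ref):
        """Issue PowerOffVM_Task for vm_ref and block until vCenter finishes it."""
        # invoke_api() sends the SOAP request (the "Invoking
        # VirtualMachine.PowerOffVM_Task" lines) and returns a task reference.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # wait_for_task() polls the task, logging progress as it goes, and
        # raises if the task ends in an error state.
        return session.wait_for_task(task)
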
[ 1760.078335] env[62510]: DEBUG nova.virt.hardware [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1760.078335] env[62510]: DEBUG nova.virt.hardware [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1760.078335] env[62510]: DEBUG nova.virt.hardware [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1760.079147] env[62510]: DEBUG nova.virt.hardware [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1760.079291] env[62510]: DEBUG nova.virt.hardware [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1760.079520] env[62510]: DEBUG nova.virt.hardware [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1760.080536] env[62510]: DEBUG nova.virt.hardware [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1760.080536] env[62510]: DEBUG nova.virt.hardware [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1760.080536] env[62510]: DEBUG nova.virt.hardware [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:501}} [ 1760.080536] env[62510]: DEBUG nova.virt.hardware [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1760.080536] env[62510]: DEBUG nova.virt.hardware [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1760.086087] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Reconfiguring VM instance instance-0000004b to detach disk 2000 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1760.086387] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-269afc27-cbc0-4922-89b7-6b539a4331b4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.109590] env[62510]: DEBUG oslo_vmware.api [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1760.109590] env[62510]: value = "task-1769121" [ 1760.109590] env[62510]: _type = "Task" [ 1760.109590] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.119312] env[62510]: DEBUG oslo_vmware.api [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769121, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.212847] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Creating Snapshot of the VM instance {{(pid=62510) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1760.212847] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-71e020d4-dcb8-4900-a3f5-ea8412ad07a8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.222445] env[62510]: DEBUG oslo_vmware.api [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Waiting for the task: (returnval){ [ 1760.222445] env[62510]: value = "task-1769122" [ 1760.222445] env[62510]: _type = "Task" [ 1760.222445] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.234507] env[62510]: DEBUG oslo_vmware.api [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769122, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.456682] env[62510]: DEBUG oslo_concurrency.lockutils [None req-b44b1d88-334b-4b50-9d0b-c14503b5f0b7 tempest-ServersAdminTestJSON-1135699744 tempest-ServersAdminTestJSON-1135699744-project-member] Lock "e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.522s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1760.462181] env[62510]: DEBUG nova.network.neutron [-] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1760.549800] env[62510]: INFO nova.compute.manager [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Took 0.59 seconds to detach 1 volumes for instance. [ 1760.555212] env[62510]: DEBUG oslo_concurrency.lockutils [None req-be6d7cfe-f9a9-4e32-a95a-84d7307e5eb0 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1760.630402] env[62510]: DEBUG oslo_vmware.api [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769121, 'name': ReconfigVM_Task, 'duration_secs': 0.196525} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1760.633292] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Reconfigured VM instance instance-0000004b to detach disk 2000 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1760.637158] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b43a0550-2ba3-4d79-920f-5ddf88719143 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.666344] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] 77f485ae-9c4c-424e-8bac-6d023e428767/77f485ae-9c4c-424e-8bac-6d023e428767.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1760.674362] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f2881822-0988-47f3-af53-9c4c62772497 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.702560] env[62510]: DEBUG oslo_vmware.api [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1760.702560] env[62510]: value = "task-1769123" [ 1760.702560] env[62510]: _type = "Task" [ 1760.702560] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.715493] env[62510]: DEBUG oslo_vmware.api [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769123, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.738116] env[62510]: DEBUG oslo_vmware.api [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769122, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.857879] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a1d564a-b712-4ccc-abba-83b119b8f80f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.869752] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17ae379f-c86e-42e1-afcd-7e4a1a620283 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.670346] env[62510]: INFO nova.compute.manager [-] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Took 2.06 seconds to deallocate network for instance. 
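Editor's note: the oslo_concurrency.lockutils entries in this stretch record how long each caller waited before acquiring the "compute_resources" lock and how long it was held when released, while the oslo_vmware.api entries poll ReconfigVM_Task and CreateSnapshot_Task until they complete. A minimal sketch of that waited/held accounting follows; it uses a plain threading.Lock and a print-style logger rather than the real oslo_concurrency helpers, so it only illustrates the pattern visible in the log.

# Sketch only: reproduce the "acquired :: waited Xs" / "released :: held Ys"
# bookkeeping that the lockutils debug lines above show.
import contextlib
import threading
import time

_LOCKS = {}
_REGISTRY_GUARD = threading.Lock()

@contextlib.contextmanager
def timed_lock(name, log=print):
    with _REGISTRY_GUARD:
        lock = _LOCKS.setdefault(name, threading.Lock())
    start = time.monotonic()
    lock.acquire()
    log('Lock "%s" acquired :: waited %.3fs' % (name, time.monotonic() - start))
    held_from = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        log('Lock "%s" released :: held %.3fs' % (name, time.monotonic() - held_from))

# Usage (hypothetical): with timed_lock("compute_resources"): ...do the protected work...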
[ 1761.671431] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1761.682644] env[62510]: DEBUG nova.compute.manager [req-861fa815-d2e7-41f8-9a37-f1373794a104 req-6d2d9a12-2472-4f91-b9ad-55470dcd4b65 service nova] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Received event network-vif-deleted-ff1d0698-2d5b-4d43-b576-87f41d637e5a {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1761.682837] env[62510]: DEBUG nova.compute.manager [req-861fa815-d2e7-41f8-9a37-f1373794a104 req-6d2d9a12-2472-4f91-b9ad-55470dcd4b65 service nova] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Received event network-vif-deleted-764fd77c-e3bd-42f6-b51b-0a6c9e718b34 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1761.690504] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-991ca58d-d653-43f1-946b-0e87c739b020 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.706421] env[62510]: DEBUG oslo_vmware.api [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769123, 'name': ReconfigVM_Task, 'duration_secs': 0.370491} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1761.714173] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Reconfigured VM instance instance-0000004b to attach disk [datastore1] 77f485ae-9c4c-424e-8bac-6d023e428767/77f485ae-9c4c-424e-8bac-6d023e428767.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1761.714173] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Updating instance '77f485ae-9c4c-424e-8bac-6d023e428767' progress to 50 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1761.715696] env[62510]: DEBUG oslo_vmware.api [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769122, 'name': CreateSnapshot_Task, 'duration_secs': 0.689102} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1761.716509] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Created Snapshot of the VM instance {{(pid=62510) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1761.717780] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c93dcfc-b44f-40f8-937f-030a7aab4eb2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.722393] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb0971d0-78f8-4b2c-bfc4-1fdd1ddb281a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.742216] env[62510]: DEBUG nova.compute.provider_tree [None req-1abcf958-0cba-4235-86a3-264e1c5b7d24 tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1762.147812] env[62510]: DEBUG oslo_vmware.rw_handles [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Completed reading data from the image iterator. {{(pid=62510) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1762.147812] env[62510]: DEBUG oslo_vmware.rw_handles [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52aee33d-04f0-96c1-b3d4-a3354d24c703/disk-0.vmdk. {{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1762.147986] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b96120f-e11c-4f66-8879-41765f55d188 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.156290] env[62510]: DEBUG oslo_vmware.rw_handles [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52aee33d-04f0-96c1-b3d4-a3354d24c703/disk-0.vmdk is in state: ready. {{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1762.156673] env[62510]: DEBUG oslo_vmware.rw_handles [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52aee33d-04f0-96c1-b3d4-a3354d24c703/disk-0.vmdk. 
{{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1762.156929] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-20093e5e-3077-423f-aca2-c1a15ba29d9d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.201297] env[62510]: DEBUG oslo_concurrency.lockutils [None req-58a00e9d-c0aa-454a-b7a9-09b96b730702 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1762.219523] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb8ecf9b-3c06-480a-afce-32a9f271875b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.255624] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Creating linked-clone VM from snapshot {{(pid=62510) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1762.256629] env[62510]: DEBUG nova.scheduler.client.report [None req-1abcf958-0cba-4235-86a3-264e1c5b7d24 tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1762.261509] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-878b1ba3-5673-4847-ba22-9e560d6d634c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.265364] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4f9f027-65c9-4d52-9060-2f0ad1877a2f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.286866] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Updating instance '77f485ae-9c4c-424e-8bac-6d023e428767' progress to 67 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1762.292640] env[62510]: DEBUG oslo_vmware.api [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Waiting for the task: (returnval){ [ 1762.292640] env[62510]: value = "task-1769124" [ 1762.292640] env[62510]: _type = "Task" [ 1762.292640] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1762.302182] env[62510]: DEBUG oslo_vmware.api [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769124, 'name': CloneVM_Task} progress is 10%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.386436] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7b3c2c9f-3159-4808-80f1-23f38edfd7f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Acquiring lock "0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1762.386436] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7b3c2c9f-3159-4808-80f1-23f38edfd7f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Lock "0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1762.386436] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7b3c2c9f-3159-4808-80f1-23f38edfd7f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Acquiring lock "0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1762.386436] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7b3c2c9f-3159-4808-80f1-23f38edfd7f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Lock "0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1762.386436] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7b3c2c9f-3159-4808-80f1-23f38edfd7f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Lock "0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1762.389153] env[62510]: INFO nova.compute.manager [None req-7b3c2c9f-3159-4808-80f1-23f38edfd7f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Terminating instance [ 1762.694985] env[62510]: DEBUG oslo_vmware.rw_handles [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52aee33d-04f0-96c1-b3d4-a3354d24c703/disk-0.vmdk. 
{{(pid=62510) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1762.695457] env[62510]: INFO nova.virt.vmwareapi.images [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Downloaded image file data 37ce35c0-4c91-45fb-b27b-04201e3f0d27 [ 1762.696271] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f06a4d7e-6076-48c8-a6bb-e26aa012ec08 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.714413] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-037d0f18-1a70-4aff-8ca4-2436fe611881 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.750641] env[62510]: INFO nova.virt.vmwareapi.images [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] The imported VM was unregistered [ 1762.756714] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Caching image {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1762.756714] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Creating directory with path [datastore1] devstack-image-cache_base/37ce35c0-4c91-45fb-b27b-04201e3f0d27 {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1762.756714] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7ff0aa2a-f261-474a-b925-022d00011bf2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.771306] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1abcf958-0cba-4235-86a3-264e1c5b7d24 tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.855s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1762.778943] env[62510]: DEBUG oslo_concurrency.lockutils [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.226s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1762.780624] env[62510]: INFO nova.compute.claims [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1762.791023] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Created directory with path [datastore1] devstack-image-cache_base/37ce35c0-4c91-45fb-b27b-04201e3f0d27 {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1762.791023] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_fa537fb8-8631-4d71-9e97-8ccd10e4d8f0/OSTACK_IMG_fa537fb8-8631-4d71-9e97-8ccd10e4d8f0.vmdk to [datastore1] devstack-image-cache_base/37ce35c0-4c91-45fb-b27b-04201e3f0d27/37ce35c0-4c91-45fb-b27b-04201e3f0d27.vmdk. {{(pid=62510) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1762.791023] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-781ce04f-537a-4db1-8259-8e3eaf863a6c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.802556] env[62510]: DEBUG oslo_vmware.api [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for the task: (returnval){ [ 1762.802556] env[62510]: value = "task-1769126" [ 1762.802556] env[62510]: _type = "Task" [ 1762.802556] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1762.808106] env[62510]: INFO nova.scheduler.client.report [None req-1abcf958-0cba-4235-86a3-264e1c5b7d24 tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Deleted allocations for instance 3533a113-6f46-4b18-872d-9bc1b0481969 [ 1762.812315] env[62510]: DEBUG oslo_vmware.api [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769124, 'name': CloneVM_Task} progress is 94%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.823046] env[62510]: DEBUG oslo_vmware.api [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1769126, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.883731] env[62510]: DEBUG nova.network.neutron [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Port 9174aa7f-56a1-4625-be49-9a7f645e961b binding to destination host cpu-1 is already ACTIVE {{(pid=62510) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1762.899973] env[62510]: DEBUG nova.compute.manager [None req-7b3c2c9f-3159-4808-80f1-23f38edfd7f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1762.900187] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-7b3c2c9f-3159-4808-80f1-23f38edfd7f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1762.901183] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-730102c0-1943-4340-b7ed-70a9cade109e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.912061] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b3c2c9f-3159-4808-80f1-23f38edfd7f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1762.912350] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-765f8f97-2c60-492c-ac13-76e8eb91e572 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.922719] env[62510]: DEBUG oslo_vmware.api [None req-7b3c2c9f-3159-4808-80f1-23f38edfd7f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Waiting for the task: (returnval){ [ 1762.922719] env[62510]: value = "task-1769127" [ 1762.922719] env[62510]: _type = "Task" [ 1762.922719] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1762.935844] env[62510]: DEBUG oslo_vmware.api [None req-7b3c2c9f-3159-4808-80f1-23f38edfd7f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': task-1769127, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.314907] env[62510]: DEBUG oslo_vmware.api [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769124, 'name': CloneVM_Task} progress is 94%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.321870] env[62510]: DEBUG oslo_vmware.api [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1769126, 'name': MoveVirtualDisk_Task} progress is 12%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.327510] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1abcf958-0cba-4235-86a3-264e1c5b7d24 tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Lock "3533a113-6f46-4b18-872d-9bc1b0481969" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.468s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1763.441898] env[62510]: DEBUG oslo_vmware.api [None req-7b3c2c9f-3159-4808-80f1-23f38edfd7f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': task-1769127, 'name': PowerOffVM_Task, 'duration_secs': 0.281796} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1763.442241] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b3c2c9f-3159-4808-80f1-23f38edfd7f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1763.442432] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-7b3c2c9f-3159-4808-80f1-23f38edfd7f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1763.442780] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7cd340dc-db18-4d25-a731-1e7bdd85614e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.816101] env[62510]: DEBUG oslo_vmware.api [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769124, 'name': CloneVM_Task} progress is 94%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.820867] env[62510]: DEBUG oslo_vmware.api [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1769126, 'name': MoveVirtualDisk_Task} progress is 32%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.923862] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "77f485ae-9c4c-424e-8bac-6d023e428767-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1763.923862] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "77f485ae-9c4c-424e-8bac-6d023e428767-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1763.923862] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "77f485ae-9c4c-424e-8bac-6d023e428767-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1764.239471] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e549ef67-75df-4f0a-9235-79a1304aa766 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.253823] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2e5db53-4c57-4ae8-bb4b-acf79c90d1ec {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.297913] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08698d2f-0611-48f0-88bb-5454b986a025 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.315873] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae3841a9-d3d1-4bfc-9fed-ccd572a5ce20 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.320962] env[62510]: DEBUG oslo_vmware.api [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769124, 'name': CloneVM_Task} progress is 95%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.323126] env[62510]: DEBUG oslo_vmware.api [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1769126, 'name': MoveVirtualDisk_Task} progress is 49%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.347168] env[62510]: DEBUG nova.compute.provider_tree [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1764.672252] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-7b3c2c9f-3159-4808-80f1-23f38edfd7f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1764.672533] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-7b3c2c9f-3159-4808-80f1-23f38edfd7f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1764.672723] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b3c2c9f-3159-4808-80f1-23f38edfd7f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Deleting the datastore file [datastore1] 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1764.673149] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3847bb11-68dd-4243-8e32-25b97d01b27c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.682335] env[62510]: DEBUG oslo_vmware.api [None req-7b3c2c9f-3159-4808-80f1-23f38edfd7f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Waiting for the task: (returnval){ [ 1764.682335] env[62510]: value = "task-1769129" [ 1764.682335] env[62510]: _type = "Task" [ 1764.682335] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1764.694641] env[62510]: DEBUG oslo_vmware.api [None req-7b3c2c9f-3159-4808-80f1-23f38edfd7f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': task-1769129, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.820992] env[62510]: DEBUG oslo_vmware.api [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1769126, 'name': MoveVirtualDisk_Task} progress is 69%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.820992] env[62510]: DEBUG oslo_vmware.api [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769124, 'name': CloneVM_Task} progress is 100%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.851523] env[62510]: DEBUG nova.scheduler.client.report [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1764.983874] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "refresh_cache-77f485ae-9c4c-424e-8bac-6d023e428767" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1764.984091] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquired lock "refresh_cache-77f485ae-9c4c-424e-8bac-6d023e428767" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1764.984286] env[62510]: DEBUG nova.network.neutron [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1765.196528] env[62510]: DEBUG oslo_vmware.api [None req-7b3c2c9f-3159-4808-80f1-23f38edfd7f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': task-1769129, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.230374] env[62510]: DEBUG oslo_concurrency.lockutils [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Acquiring lock "f40078f0-af6b-480b-96e6-4117022c87e2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1765.230683] env[62510]: DEBUG oslo_concurrency.lockutils [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Lock "f40078f0-af6b-480b-96e6-4117022c87e2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1765.323778] env[62510]: DEBUG oslo_vmware.api [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769124, 'name': CloneVM_Task, 'duration_secs': 2.570944} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1765.328187] env[62510]: INFO nova.virt.vmwareapi.vmops [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Created linked-clone VM from snapshot [ 1765.329042] env[62510]: DEBUG oslo_vmware.api [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1769126, 'name': MoveVirtualDisk_Task} progress is 83%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.331044] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f046e15c-c087-4e34-86dd-a208cea14c6f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.340788] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Uploading image 16568b7b-b6fc-4a0b-a998-c12029e93388 {{(pid=62510) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1765.357404] env[62510]: DEBUG oslo_concurrency.lockutils [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.578s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1765.359709] env[62510]: DEBUG nova.compute.manager [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1765.361764] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.796s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1765.365474] env[62510]: INFO nova.compute.claims [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1765.383108] env[62510]: DEBUG oslo_vmware.rw_handles [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1765.383108] env[62510]: value = "vm-367411" [ 1765.383108] env[62510]: _type = "VirtualMachine" [ 1765.383108] env[62510]: }. 
{{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1765.383750] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-6d995645-1222-4058-a380-11dc2f4b8986 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.392715] env[62510]: DEBUG oslo_vmware.rw_handles [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Lease: (returnval){ [ 1765.392715] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52efe900-0d14-a646-4e10-6ca73feb6968" [ 1765.392715] env[62510]: _type = "HttpNfcLease" [ 1765.392715] env[62510]: } obtained for exporting VM: (result){ [ 1765.392715] env[62510]: value = "vm-367411" [ 1765.392715] env[62510]: _type = "VirtualMachine" [ 1765.392715] env[62510]: }. {{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1765.393128] env[62510]: DEBUG oslo_vmware.api [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Waiting for the lease: (returnval){ [ 1765.393128] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52efe900-0d14-a646-4e10-6ca73feb6968" [ 1765.393128] env[62510]: _type = "HttpNfcLease" [ 1765.393128] env[62510]: } to be ready. {{(pid=62510) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1765.405517] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1765.405517] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52efe900-0d14-a646-4e10-6ca73feb6968" [ 1765.405517] env[62510]: _type = "HttpNfcLease" [ 1765.405517] env[62510]: } is initializing. {{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1765.697332] env[62510]: DEBUG oslo_vmware.api [None req-7b3c2c9f-3159-4808-80f1-23f38edfd7f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': task-1769129, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.732873] env[62510]: DEBUG nova.compute.manager [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1765.770374] env[62510]: DEBUG nova.network.neutron [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Updating instance_info_cache with network_info: [{"id": "9174aa7f-56a1-4625-be49-9a7f645e961b", "address": "fa:16:3e:c3:e4:7b", "network": {"id": "bf59f5d9-5154-4120-9edd-03529b552382", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2003015829-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e144c0bd2d124193a65ad53de8c43039", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9174aa7f-56", "ovs_interfaceid": "9174aa7f-56a1-4625-be49-9a7f645e961b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1765.822483] env[62510]: DEBUG oslo_vmware.api [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1769126, 'name': MoveVirtualDisk_Task} progress is 100%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.838155] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Acquiring lock "1dc9e3b6-5e75-49b4-aef0-01200fb9be47" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1765.838318] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Lock "1dc9e3b6-5e75-49b4-aef0-01200fb9be47" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1765.872530] env[62510]: DEBUG nova.compute.utils [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1765.875949] env[62510]: DEBUG nova.compute.manager [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1765.876098] env[62510]: DEBUG nova.network.neutron [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1765.902452] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1765.902452] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52efe900-0d14-a646-4e10-6ca73feb6968" [ 1765.902452] env[62510]: _type = "HttpNfcLease" [ 1765.902452] env[62510]: } is ready. {{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1765.903354] env[62510]: DEBUG oslo_vmware.rw_handles [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1765.903354] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52efe900-0d14-a646-4e10-6ca73feb6968" [ 1765.903354] env[62510]: _type = "HttpNfcLease" [ 1765.903354] env[62510]: }. 
{{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1765.904172] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e17ac23-a033-4356-aea9-eb5b075e68ff {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.912055] env[62510]: DEBUG oslo_vmware.rw_handles [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52648f17-266f-5e35-088b-38e71b953533/disk-0.vmdk from lease info. {{(pid=62510) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1765.912231] env[62510]: DEBUG oslo_vmware.rw_handles [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52648f17-266f-5e35-088b-38e71b953533/disk-0.vmdk for reading. {{(pid=62510) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1765.974576] env[62510]: DEBUG nova.policy [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e591b2631edf4c64b04724c359560055', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '275751af13214e1ea84d4bd8b70097b1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1766.008857] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-4138f803-bd15-4609-93e6-5fffdd8eff24 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.162347] env[62510]: DEBUG oslo_concurrency.lockutils [None req-df0f4734-53d1-4fbd-9757-27f0801c701c tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquiring lock "83fa0d32-18ee-401d-af0b-a0adb538e5f4" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1766.162632] env[62510]: DEBUG oslo_concurrency.lockutils [None req-df0f4734-53d1-4fbd-9757-27f0801c701c tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lock "83fa0d32-18ee-401d-af0b-a0adb538e5f4" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1766.196612] env[62510]: DEBUG oslo_vmware.api [None req-7b3c2c9f-3159-4808-80f1-23f38edfd7f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Task: {'id': task-1769129, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.258264} completed 
successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1766.196919] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b3c2c9f-3159-4808-80f1-23f38edfd7f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1766.197434] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-7b3c2c9f-3159-4808-80f1-23f38edfd7f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1766.197661] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-7b3c2c9f-3159-4808-80f1-23f38edfd7f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1766.197844] env[62510]: INFO nova.compute.manager [None req-7b3c2c9f-3159-4808-80f1-23f38edfd7f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Took 3.30 seconds to destroy the instance on the hypervisor. [ 1766.198109] env[62510]: DEBUG oslo.service.loopingcall [None req-7b3c2c9f-3159-4808-80f1-23f38edfd7f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1766.198498] env[62510]: DEBUG nova.compute.manager [-] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1766.198655] env[62510]: DEBUG nova.network.neutron [-] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1766.270405] env[62510]: DEBUG oslo_concurrency.lockutils [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1766.274879] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Releasing lock "refresh_cache-77f485ae-9c4c-424e-8bac-6d023e428767" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1766.328021] env[62510]: DEBUG oslo_vmware.api [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1769126, 'name': MoveVirtualDisk_Task, 'duration_secs': 3.056553} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1766.328021] env[62510]: INFO nova.virt.vmwareapi.ds_util [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_fa537fb8-8631-4d71-9e97-8ccd10e4d8f0/OSTACK_IMG_fa537fb8-8631-4d71-9e97-8ccd10e4d8f0.vmdk to [datastore1] devstack-image-cache_base/37ce35c0-4c91-45fb-b27b-04201e3f0d27/37ce35c0-4c91-45fb-b27b-04201e3f0d27.vmdk. 
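The MoveVirtualDisk_Task and DeleteDatastoreFile_Task entries above are driven through oslo.vmware's task-polling helpers: the caller starts a vCenter task via the API session and then blocks in wait_for_task(), whose poller produces the "Task: {...} progress is N%" and "completed successfully" DEBUG lines. A minimal sketch of that calling pattern follows; it is not lifted from the Nova source, and the vCenter host, credentials, datastore paths and dc_ref are placeholders that do not appear in this log.

    # Sketch of the oslo.vmware call pattern behind the task entries above.
    # Placeholders (assumptions): host/credentials, datastore paths, dc_ref.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    disk_mgr = session.vim.service_content.virtualDiskManager
    dc_ref = None  # placeholder: a Datacenter managed-object reference, normally
                   # looked up via PropertyCollector calls like the ones logged above

    # Start the vCenter task ...
    task = session.invoke_api(
        session.vim, 'MoveVirtualDisk_Task', disk_mgr,
        sourceName='[datastore1] src/src.vmdk', sourceDatacenter=dc_ref,
        destName='[datastore1] dst/dst.vmdk', destDatacenter=dc_ref)

    # ... then block until it reaches a terminal state. wait_for_task() polls the
    # task object and logs its progress, which is what _poll_task records here.
    session.wait_for_task(task)

The same wait_for_task() loop is behind every "Task: {'id': task-..., ...}" progress and completion line that follows in this log.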
[ 1766.328021] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Cleaning up location [datastore1] OSTACK_IMG_fa537fb8-8631-4d71-9e97-8ccd10e4d8f0 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1766.328021] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_fa537fb8-8631-4d71-9e97-8ccd10e4d8f0 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1766.328021] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-de3da414-20b1-4a13-b3d9-096c8f7b0b60 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.334235] env[62510]: DEBUG oslo_vmware.api [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for the task: (returnval){ [ 1766.334235] env[62510]: value = "task-1769131" [ 1766.334235] env[62510]: _type = "Task" [ 1766.334235] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.341576] env[62510]: DEBUG nova.compute.manager [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1766.350288] env[62510]: DEBUG oslo_vmware.api [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1769131, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.383841] env[62510]: DEBUG nova.compute.manager [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Start building block device mappings for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1766.440151] env[62510]: DEBUG nova.network.neutron [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Successfully created port: d6cdea66-2edf-49fa-9c50-5293d0a33351 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1766.669022] env[62510]: INFO nova.compute.manager [None req-df0f4734-53d1-4fbd-9757-27f0801c701c tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Detaching volume fc768519-1bbc-47ae-b9f1-9717554b1759 [ 1766.723270] env[62510]: INFO nova.virt.block_device [None req-df0f4734-53d1-4fbd-9757-27f0801c701c tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Attempting to driver detach volume fc768519-1bbc-47ae-b9f1-9717554b1759 from mountpoint /dev/sdb [ 1766.725733] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-df0f4734-53d1-4fbd-9757-27f0801c701c tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Volume detach. Driver type: vmdk {{(pid=62510) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1766.726297] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-df0f4734-53d1-4fbd-9757-27f0801c701c tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367365', 'volume_id': 'fc768519-1bbc-47ae-b9f1-9717554b1759', 'name': 'volume-fc768519-1bbc-47ae-b9f1-9717554b1759', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '83fa0d32-18ee-401d-af0b-a0adb538e5f4', 'attached_at': '', 'detached_at': '', 'volume_id': 'fc768519-1bbc-47ae-b9f1-9717554b1759', 'serial': 'fc768519-1bbc-47ae-b9f1-9717554b1759'} {{(pid=62510) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1766.728188] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5ed0e93-ee5c-4edc-9347-d5f91d9b3217 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.771205] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b68d9962-04fb-4be8-a402-d8dba4d30b4a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.787874] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e503832-ac97-4f40-8d35-058b58f42650 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.826384] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c34ab85-3374-4cc9-9d6c-362cf54c8abf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.829926] env[62510]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e63d920a-4c69-4f90-8fe0-2c0e8c76e8b6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.878738] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-df0f4734-53d1-4fbd-9757-27f0801c701c tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] The volume has not been displaced from its original location: [datastore1] volume-fc768519-1bbc-47ae-b9f1-9717554b1759/volume-fc768519-1bbc-47ae-b9f1-9717554b1759.vmdk. No consolidation needed. {{(pid=62510) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1766.888456] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-df0f4734-53d1-4fbd-9757-27f0801c701c tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Reconfiguring VM instance instance-00000027 to detach disk 2001 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1766.897214] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95991d29-8602-4991-9142-7a105e785685 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.901998] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f053fd8a-4785-406a-b478-afbb431307fe {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.938428] env[62510]: DEBUG oslo_vmware.api [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1769131, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.056908} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1766.938777] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1766.940397] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Releasing lock "[datastore1] devstack-image-cache_base/37ce35c0-4c91-45fb-b27b-04201e3f0d27/37ce35c0-4c91-45fb-b27b-04201e3f0d27.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1766.940397] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/37ce35c0-4c91-45fb-b27b-04201e3f0d27/37ce35c0-4c91-45fb-b27b-04201e3f0d27.vmdk to [datastore1] e7daad63-c802-4a86-bead-7e849064ed61/e7daad63-c802-4a86-bead-7e849064ed61.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1766.940397] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ce7179b6-53cd-42ba-a8f6-ff3e11e98a55 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.948312] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Updating instance '77f485ae-9c4c-424e-8bac-6d023e428767' progress to 83 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1766.958966] env[62510]: DEBUG oslo_vmware.api [None req-df0f4734-53d1-4fbd-9757-27f0801c701c tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1766.958966] env[62510]: value = "task-1769132" [ 1766.958966] env[62510]: _type = "Task" [ 1766.958966] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.964392] env[62510]: DEBUG oslo_vmware.api [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for the task: (returnval){ [ 1766.964392] env[62510]: value = "task-1769133" [ 1766.964392] env[62510]: _type = "Task" [ 1766.964392] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.971648] env[62510]: DEBUG oslo_vmware.api [None req-df0f4734-53d1-4fbd-9757-27f0801c701c tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769132, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.983052] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1766.983516] env[62510]: DEBUG oslo_vmware.api [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1769133, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.002479] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e39a8ea5-63b3-47a1-b729-b5745c74269c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.013337] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69b5bdfe-2d0a-43f3-8510-3c38f7495160 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.048874] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3933cc4-e4b0-42a7-a0fa-83baead690ec {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.058428] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-764ddb7d-2b33-45d9-8d65-27623be1cc24 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.075169] env[62510]: DEBUG nova.compute.provider_tree [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1767.284071] env[62510]: DEBUG nova.compute.manager [req-08bd664e-180b-4984-898b-80b208e5b1bd req-b1683ef9-b491-4d9c-ba64-47db3d5ef811 service nova] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Received event network-vif-deleted-9a53a8d4-8b7d-4167-b888-f20b2fce23c5 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1767.284246] env[62510]: INFO nova.compute.manager [req-08bd664e-180b-4984-898b-80b208e5b1bd req-b1683ef9-b491-4d9c-ba64-47db3d5ef811 service nova] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Neutron deleted interface 9a53a8d4-8b7d-4167-b888-f20b2fce23c5; detaching it from the instance and deleting it from the info cache [ 1767.284413] env[62510]: DEBUG nova.network.neutron [req-08bd664e-180b-4984-898b-80b208e5b1bd req-b1683ef9-b491-4d9c-ba64-47db3d5ef811 service nova] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1767.440071] env[62510]: DEBUG nova.compute.manager [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff 
tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1767.462647] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1767.463072] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c9b35749-776a-4fc2-ae2c-e50ca238da77 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.477580] env[62510]: DEBUG nova.virt.hardware [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1767.477754] env[62510]: DEBUG nova.virt.hardware [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1767.477813] env[62510]: DEBUG nova.virt.hardware [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1767.477981] env[62510]: DEBUG nova.virt.hardware [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1767.478180] env[62510]: DEBUG nova.virt.hardware [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1767.478283] env[62510]: DEBUG nova.virt.hardware [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Chose sockets=0, cores=0, 
threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1767.478556] env[62510]: DEBUG nova.virt.hardware [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1767.478762] env[62510]: DEBUG nova.virt.hardware [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1767.478941] env[62510]: DEBUG nova.virt.hardware [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1767.479381] env[62510]: DEBUG nova.virt.hardware [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1767.479681] env[62510]: DEBUG nova.virt.hardware [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1767.480692] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8532788-a1fa-4438-8306-2c7f3d95d3bf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.488360] env[62510]: DEBUG oslo_vmware.api [None req-df0f4734-53d1-4fbd-9757-27f0801c701c tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769132, 'name': ReconfigVM_Task, 'duration_secs': 0.266106} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1767.489241] env[62510]: DEBUG oslo_vmware.api [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1767.489241] env[62510]: value = "task-1769134" [ 1767.489241] env[62510]: _type = "Task" [ 1767.489241] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1767.489996] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-df0f4734-53d1-4fbd-9757-27f0801c701c tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Reconfigured VM instance instance-00000027 to detach disk 2001 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1767.501458] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3eff77b7-97b5-4a62-8e9e-556f0602e7fc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.523047] env[62510]: DEBUG oslo_vmware.api [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1769133, 'name': CopyVirtualDisk_Task} progress is 15%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.524758] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-004ef229-2312-401d-bbd7-b143f9153c08 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.548795] env[62510]: DEBUG oslo_vmware.api [None req-df0f4734-53d1-4fbd-9757-27f0801c701c tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1767.548795] env[62510]: value = "task-1769135" [ 1767.548795] env[62510]: _type = "Task" [ 1767.548795] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1767.549467] env[62510]: DEBUG oslo_vmware.api [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769134, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.560963] env[62510]: DEBUG oslo_vmware.api [None req-df0f4734-53d1-4fbd-9757-27f0801c701c tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769135, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.579992] env[62510]: DEBUG nova.scheduler.client.report [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1767.685847] env[62510]: DEBUG nova.network.neutron [-] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1767.787870] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a1ca0453-e57d-47b3-86e7-c9cca1407ec7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.803529] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04123318-c834-4501-8350-e4f8c11bbacc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.843039] env[62510]: DEBUG nova.compute.manager [req-08bd664e-180b-4984-898b-80b208e5b1bd req-b1683ef9-b491-4d9c-ba64-47db3d5ef811 service nova] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Detach interface failed, port_id=9a53a8d4-8b7d-4167-b888-f20b2fce23c5, reason: Instance 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a could not be found. {{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1767.901239] env[62510]: DEBUG oslo_concurrency.lockutils [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Acquiring lock "31772dc9-4f04-42df-9e3b-3200cc72c977" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1767.901624] env[62510]: DEBUG oslo_concurrency.lockutils [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Lock "31772dc9-4f04-42df-9e3b-3200cc72c977" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1767.983339] env[62510]: DEBUG oslo_vmware.api [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1769133, 'name': CopyVirtualDisk_Task} progress is 32%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1768.010872] env[62510]: DEBUG oslo_vmware.api [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769134, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1768.060667] env[62510]: DEBUG oslo_vmware.api [None req-df0f4734-53d1-4fbd-9757-27f0801c701c tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769135, 'name': ReconfigVM_Task, 'duration_secs': 0.176706} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1768.060800] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-df0f4734-53d1-4fbd-9757-27f0801c701c tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367365', 'volume_id': 'fc768519-1bbc-47ae-b9f1-9717554b1759', 'name': 'volume-fc768519-1bbc-47ae-b9f1-9717554b1759', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '83fa0d32-18ee-401d-af0b-a0adb538e5f4', 'attached_at': '', 'detached_at': '', 'volume_id': 'fc768519-1bbc-47ae-b9f1-9717554b1759', 'serial': 'fc768519-1bbc-47ae-b9f1-9717554b1759'} {{(pid=62510) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1768.085385] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.724s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1768.086146] env[62510]: DEBUG nova.compute.manager [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1768.089829] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.321s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1768.091515] env[62510]: INFO nova.compute.claims [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1768.189807] env[62510]: INFO nova.compute.manager [-] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Took 1.99 seconds to deallocate network for instance. 
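The inventory dict logged above for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 maps onto Placement's standard capacity rule: the schedulable amount of each resource class is (total - reserved) * allocation_ratio, with min_unit/max_unit/step_size constraining individual allocations. A quick check with the logged figures (only the formula is assumed; the numbers are copied from the inventory data above):

    # Capacity check using the inventory values logged above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
        print(rc, capacity)
    # VCPU 192, MEMORY_MB 196078, DISK_GB 400

This is why the 48-core node can expose 192 VCPU of schedulable capacity while memory and disk, with allocation_ratio 1.0, are not overcommitted.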
[ 1768.190196] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Acquiring lock "1e3e2044-a072-454f-85ba-5cb0bc36b5fd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1768.190434] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Lock "1e3e2044-a072-454f-85ba-5cb0bc36b5fd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1768.404570] env[62510]: DEBUG nova.compute.manager [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 31772dc9-4f04-42df-9e3b-3200cc72c977] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1768.484365] env[62510]: DEBUG oslo_vmware.api [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1769133, 'name': CopyVirtualDisk_Task} progress is 52%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1768.512699] env[62510]: DEBUG oslo_vmware.api [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769134, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1768.596750] env[62510]: DEBUG nova.compute.utils [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1768.601438] env[62510]: DEBUG nova.compute.manager [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1768.601673] env[62510]: DEBUG nova.network.neutron [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1768.621955] env[62510]: DEBUG nova.objects.instance [None req-df0f4734-53d1-4fbd-9757-27f0801c701c tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lazy-loading 'flavor' on Instance uuid 83fa0d32-18ee-401d-af0b-a0adb538e5f4 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1768.695840] env[62510]: DEBUG nova.compute.manager [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1768.699577] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7b3c2c9f-3159-4808-80f1-23f38edfd7f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1768.732348] env[62510]: DEBUG nova.policy [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8bab2df0cdfb4853879515120e93ce25', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'de0d125bba6242d3b9614402098efc1f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1768.775707] env[62510]: DEBUG nova.network.neutron [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Successfully updated port: d6cdea66-2edf-49fa-9c50-5293d0a33351 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1768.929977] env[62510]: DEBUG oslo_concurrency.lockutils [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1768.984773] env[62510]: DEBUG oslo_vmware.api [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1769133, 'name': CopyVirtualDisk_Task} progress is 69%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.009979] env[62510]: DEBUG oslo_vmware.api [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769134, 'name': PowerOnVM_Task} progress is 71%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.105815] env[62510]: DEBUG nova.compute.manager [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1769.221197] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1769.259945] env[62510]: DEBUG nova.network.neutron [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Successfully created port: d3047f95-b766-4344-bc0c-ad2f1b9f55fd {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1769.281582] env[62510]: DEBUG oslo_concurrency.lockutils [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Acquiring lock "refresh_cache-dabc046f-10f5-43d8-90f8-507dcb4d0144" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1769.281790] env[62510]: DEBUG oslo_concurrency.lockutils [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Acquired lock "refresh_cache-dabc046f-10f5-43d8-90f8-507dcb4d0144" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1769.281887] env[62510]: DEBUG nova.network.neutron [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1769.331113] env[62510]: DEBUG nova.compute.manager [req-a150b7c1-8dcb-47c0-8b14-7d900c6d0c13 req-a86de904-3cc0-43a6-bdee-7360309cabb1 service nova] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Received event network-vif-plugged-d6cdea66-2edf-49fa-9c50-5293d0a33351 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1769.331113] env[62510]: DEBUG oslo_concurrency.lockutils [req-a150b7c1-8dcb-47c0-8b14-7d900c6d0c13 req-a86de904-3cc0-43a6-bdee-7360309cabb1 service nova] Acquiring lock "dabc046f-10f5-43d8-90f8-507dcb4d0144-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1769.331113] env[62510]: DEBUG oslo_concurrency.lockutils [req-a150b7c1-8dcb-47c0-8b14-7d900c6d0c13 req-a86de904-3cc0-43a6-bdee-7360309cabb1 service nova] Lock "dabc046f-10f5-43d8-90f8-507dcb4d0144-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1769.331113] env[62510]: DEBUG oslo_concurrency.lockutils [req-a150b7c1-8dcb-47c0-8b14-7d900c6d0c13 req-a86de904-3cc0-43a6-bdee-7360309cabb1 service nova] Lock "dabc046f-10f5-43d8-90f8-507dcb4d0144-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1769.331113] env[62510]: DEBUG nova.compute.manager [req-a150b7c1-8dcb-47c0-8b14-7d900c6d0c13 req-a86de904-3cc0-43a6-bdee-7360309cabb1 service nova] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] No waiting events found dispatching network-vif-plugged-d6cdea66-2edf-49fa-9c50-5293d0a33351 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1769.331113] env[62510]: WARNING nova.compute.manager [req-a150b7c1-8dcb-47c0-8b14-7d900c6d0c13 req-a86de904-3cc0-43a6-bdee-7360309cabb1 service nova] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Received unexpected event network-vif-plugged-d6cdea66-2edf-49fa-9c50-5293d0a33351 for instance with vm_state building and task_state spawning. [ 1769.331405] env[62510]: DEBUG nova.compute.manager [req-a150b7c1-8dcb-47c0-8b14-7d900c6d0c13 req-a86de904-3cc0-43a6-bdee-7360309cabb1 service nova] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Received event network-changed-d6cdea66-2edf-49fa-9c50-5293d0a33351 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1769.331405] env[62510]: DEBUG nova.compute.manager [req-a150b7c1-8dcb-47c0-8b14-7d900c6d0c13 req-a86de904-3cc0-43a6-bdee-7360309cabb1 service nova] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Refreshing instance network info cache due to event network-changed-d6cdea66-2edf-49fa-9c50-5293d0a33351. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1769.331572] env[62510]: DEBUG oslo_concurrency.lockutils [req-a150b7c1-8dcb-47c0-8b14-7d900c6d0c13 req-a86de904-3cc0-43a6-bdee-7360309cabb1 service nova] Acquiring lock "refresh_cache-dabc046f-10f5-43d8-90f8-507dcb4d0144" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1769.487467] env[62510]: DEBUG oslo_vmware.api [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1769133, 'name': CopyVirtualDisk_Task} progress is 88%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.514218] env[62510]: DEBUG oslo_vmware.api [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769134, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.537172] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e4c7a91-0d5e-4322-ada5-c77cca51697e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.546565] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30c8f3bc-c0b3-40bc-a096-0f8480e62350 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.584369] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d836c4f-b11b-4726-bb8f-c4603cfead76 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.597184] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d88bb023-0c24-468c-a0ff-a969bd929101 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.621392] env[62510]: DEBUG nova.compute.provider_tree [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1769.632242] env[62510]: DEBUG oslo_concurrency.lockutils [None req-df0f4734-53d1-4fbd-9757-27f0801c701c tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lock "83fa0d32-18ee-401d-af0b-a0adb538e5f4" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.469s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1769.843048] env[62510]: DEBUG nova.network.neutron [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1769.991218] env[62510]: DEBUG oslo_vmware.api [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1769133, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.875122} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1769.991606] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/37ce35c0-4c91-45fb-b27b-04201e3f0d27/37ce35c0-4c91-45fb-b27b-04201e3f0d27.vmdk to [datastore1] e7daad63-c802-4a86-bead-7e849064ed61/e7daad63-c802-4a86-bead-7e849064ed61.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1769.992472] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69908461-f3f6-4057-bae6-ee94299779e4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.016607] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] e7daad63-c802-4a86-bead-7e849064ed61/e7daad63-c802-4a86-bead-7e849064ed61.vmdk or device None with type streamOptimized {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1770.022032] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e67bcbcd-e2f4-4810-a7ed-2d67e7070fdd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.043906] env[62510]: DEBUG oslo_vmware.api [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769134, 'name': PowerOnVM_Task, 'duration_secs': 2.165904} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1770.045432] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1770.045643] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-7de599be-9c79-41d5-9bc0-2d37a9e61e84 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Updating instance '77f485ae-9c4c-424e-8bac-6d023e428767' progress to 100 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1770.050053] env[62510]: DEBUG oslo_vmware.api [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for the task: (returnval){ [ 1770.050053] env[62510]: value = "task-1769136" [ 1770.050053] env[62510]: _type = "Task" [ 1770.050053] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1770.062760] env[62510]: DEBUG oslo_vmware.api [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1769136, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.092965] env[62510]: DEBUG nova.network.neutron [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Updating instance_info_cache with network_info: [{"id": "d6cdea66-2edf-49fa-9c50-5293d0a33351", "address": "fa:16:3e:30:1a:62", "network": {"id": "a7915935-f80a-4779-9b05-14618ff0da4f", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1835649106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "275751af13214e1ea84d4bd8b70097b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6cdea66-2e", "ovs_interfaceid": "d6cdea66-2edf-49fa-9c50-5293d0a33351", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1770.124177] env[62510]: DEBUG nova.compute.manager [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1770.127291] env[62510]: DEBUG nova.scheduler.client.report [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1770.158136] env[62510]: DEBUG nova.virt.hardware [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1770.158406] env[62510]: DEBUG nova.virt.hardware [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1770.158701] env[62510]: DEBUG nova.virt.hardware [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1770.158701] env[62510]: DEBUG nova.virt.hardware [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1770.158831] env[62510]: DEBUG nova.virt.hardware [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1770.158952] env[62510]: DEBUG nova.virt.hardware [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1770.159151] env[62510]: DEBUG nova.virt.hardware [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1770.159349] env[62510]: DEBUG nova.virt.hardware [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1770.159570] env[62510]: DEBUG nova.virt.hardware [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1770.159982] env[62510]: DEBUG nova.virt.hardware [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1770.159982] env[62510]: DEBUG nova.virt.hardware [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1770.161531] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7948f81f-fe8e-45c1-9264-5660ba885323 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.170680] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b29639c6-02e2-4a08-96ac-4279d3e38b1d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.565387] env[62510]: DEBUG oslo_vmware.api [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1769136, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.596103] env[62510]: DEBUG oslo_concurrency.lockutils [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Releasing lock "refresh_cache-dabc046f-10f5-43d8-90f8-507dcb4d0144" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1770.596591] env[62510]: DEBUG nova.compute.manager [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Instance network_info: |[{"id": "d6cdea66-2edf-49fa-9c50-5293d0a33351", "address": "fa:16:3e:30:1a:62", "network": {"id": "a7915935-f80a-4779-9b05-14618ff0da4f", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1835649106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "275751af13214e1ea84d4bd8b70097b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6cdea66-2e", "ovs_interfaceid": "d6cdea66-2edf-49fa-9c50-5293d0a33351", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1770.597136] env[62510]: DEBUG oslo_concurrency.lockutils [req-a150b7c1-8dcb-47c0-8b14-7d900c6d0c13 req-a86de904-3cc0-43a6-bdee-7360309cabb1 service nova] Acquired lock "refresh_cache-dabc046f-10f5-43d8-90f8-507dcb4d0144" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1770.597374] env[62510]: DEBUG nova.network.neutron [req-a150b7c1-8dcb-47c0-8b14-7d900c6d0c13 req-a86de904-3cc0-43a6-bdee-7360309cabb1 service nova] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Refreshing network info cache for port d6cdea66-2edf-49fa-9c50-5293d0a33351 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1770.598834] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:30:1a:62', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '205fb402-8eaf-4b61-8f57-8f216024179a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd6cdea66-2edf-49fa-9c50-5293d0a33351', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1770.608273] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff 
tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Creating folder: Project (275751af13214e1ea84d4bd8b70097b1). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1770.612192] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d28a8c59-5ed2-4172-8c19-ac9a8ad22b96 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.629664] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Created folder: Project (275751af13214e1ea84d4bd8b70097b1) in parent group-v367197. [ 1770.629882] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Creating folder: Instances. Parent ref: group-v367412. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1770.630534] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c602e1f4-7177-46b6-8846-2dd09c94aacb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.633468] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.544s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1770.634149] env[62510]: DEBUG nova.compute.manager [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1770.637193] env[62510]: DEBUG oslo_concurrency.lockutils [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.911s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1770.638700] env[62510]: INFO nova.compute.claims [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1770.649646] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0a8190a5-cf1f-404d-b054-1f1da4b2c071 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquiring lock "83fa0d32-18ee-401d-af0b-a0adb538e5f4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1770.653048] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0a8190a5-cf1f-404d-b054-1f1da4b2c071 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lock "83fa0d32-18ee-401d-af0b-a0adb538e5f4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1770.653048] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0a8190a5-cf1f-404d-b054-1f1da4b2c071 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquiring lock "83fa0d32-18ee-401d-af0b-a0adb538e5f4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1770.653048] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0a8190a5-cf1f-404d-b054-1f1da4b2c071 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lock "83fa0d32-18ee-401d-af0b-a0adb538e5f4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1770.653048] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0a8190a5-cf1f-404d-b054-1f1da4b2c071 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lock "83fa0d32-18ee-401d-af0b-a0adb538e5f4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1770.653949] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Created folder: Instances in parent group-v367412. 
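The surrounding entries repeatedly show the same vCenter interaction pattern driven through oslo.vmware: an asynchronous task is started (CreateVM_Task, ReconfigVM_Task, PowerOnVM_Task, CopyVirtualDisk_Task, SearchDatastore_Task, ...), then wait_for_task blocks on it, emitting the "Waiting for the task" and "Task: {...} progress is N%" DEBUG lines seen above and below until the task completes. The following is only a minimal illustrative sketch of that pattern against the public oslo.vmware API, not Nova's actual code; the vCenter address and credentials, and the vm_folder_ref, config_spec and res_pool_ref objects, are placeholders assumed to exist.

    # Illustrative sketch of the oslo.vmware task-polling pattern visible in this log.
    # Placeholders (not from the log): host/user/password, vm_folder_ref,
    # config_spec, res_pool_ref.
    from oslo_vmware import api

    # Creating the session acquires "oslo_vmware_api_lock" and logs into vCenter.
    session = api.VMwareAPISession('vc.example.org', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # Start an asynchronous vCenter task (here a VM creation on a folder)...
    task_ref = session.invoke_api(session.vim, 'CreateVM_Task', vm_folder_ref,
                                  config=config_spec, pool=res_pool_ref)

    # ...then block on it. wait_for_task polls the task state and logs the
    # "Waiting for the task" / "progress is N%" lines until the task either
    # succeeds (returning its TaskInfo) or fails (raising an exception).
    task_info = session.wait_for_task(task_ref)

This is the loop behind entries such as task-1769139 (CreateVM_Task) below: the DEBUG progress lines come from oslo.vmware's internal polling, not from Nova itself.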
[ 1770.653949] env[62510]: DEBUG oslo.service.loopingcall [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1770.654261] env[62510]: INFO nova.compute.manager [None req-0a8190a5-cf1f-404d-b054-1f1da4b2c071 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Terminating instance [ 1770.655737] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1770.655960] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ab8790ac-9b36-4ff9-9df9-668a0913ef22 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.680220] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1770.680220] env[62510]: value = "task-1769139" [ 1770.680220] env[62510]: _type = "Task" [ 1770.680220] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1770.689556] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769139, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.070434] env[62510]: DEBUG oslo_vmware.api [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1769136, 'name': ReconfigVM_Task, 'duration_secs': 0.620993} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1771.070766] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Reconfigured VM instance instance-00000033 to attach disk [datastore1] e7daad63-c802-4a86-bead-7e849064ed61/e7daad63-c802-4a86-bead-7e849064ed61.vmdk or device None with type streamOptimized {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1771.071630] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-47c8798d-b3c2-471a-ad75-e1ceb9cc8c3c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.079984] env[62510]: DEBUG oslo_vmware.api [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for the task: (returnval){ [ 1771.079984] env[62510]: value = "task-1769140" [ 1771.079984] env[62510]: _type = "Task" [ 1771.079984] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1771.093949] env[62510]: DEBUG oslo_vmware.api [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1769140, 'name': Rename_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.140099] env[62510]: DEBUG nova.network.neutron [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Successfully updated port: d3047f95-b766-4344-bc0c-ad2f1b9f55fd {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1771.153239] env[62510]: DEBUG nova.compute.utils [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1771.160030] env[62510]: DEBUG nova.compute.manager [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1771.160239] env[62510]: DEBUG nova.network.neutron [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1771.174671] env[62510]: DEBUG nova.compute.manager [None req-0a8190a5-cf1f-404d-b054-1f1da4b2c071 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1771.174936] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0a8190a5-cf1f-404d-b054-1f1da4b2c071 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1771.176104] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c8cd168-38fe-4ad6-a7a9-e6f1993ee471 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.186766] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a8190a5-cf1f-404d-b054-1f1da4b2c071 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1771.187543] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d3561146-c0b0-479f-abf4-238e025a44ab {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.192959] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769139, 'name': CreateVM_Task, 'duration_secs': 0.381595} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1771.193158] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1771.193871] env[62510]: DEBUG oslo_concurrency.lockutils [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1771.194065] env[62510]: DEBUG oslo_concurrency.lockutils [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1771.194441] env[62510]: DEBUG oslo_concurrency.lockutils [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1771.196155] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd8c600a-627e-453b-9eec-88998eea4990 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.202101] env[62510]: DEBUG oslo_vmware.api [None req-0a8190a5-cf1f-404d-b054-1f1da4b2c071 tempest-AttachVolumeShelveTestJSON-1484778233 
tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1771.202101] env[62510]: value = "task-1769141" [ 1771.202101] env[62510]: _type = "Task" [ 1771.202101] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1771.211727] env[62510]: DEBUG oslo_vmware.api [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Waiting for the task: (returnval){ [ 1771.211727] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52ae75b2-c59d-0680-b4ac-7f991a1ff8b5" [ 1771.211727] env[62510]: _type = "Task" [ 1771.211727] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1771.219703] env[62510]: DEBUG oslo_vmware.api [None req-0a8190a5-cf1f-404d-b054-1f1da4b2c071 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769141, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.225581] env[62510]: DEBUG oslo_vmware.api [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52ae75b2-c59d-0680-b4ac-7f991a1ff8b5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.229012] env[62510]: DEBUG nova.network.neutron [req-a150b7c1-8dcb-47c0-8b14-7d900c6d0c13 req-a86de904-3cc0-43a6-bdee-7360309cabb1 service nova] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Updated VIF entry in instance network info cache for port d6cdea66-2edf-49fa-9c50-5293d0a33351. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1771.229402] env[62510]: DEBUG nova.network.neutron [req-a150b7c1-8dcb-47c0-8b14-7d900c6d0c13 req-a86de904-3cc0-43a6-bdee-7360309cabb1 service nova] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Updating instance_info_cache with network_info: [{"id": "d6cdea66-2edf-49fa-9c50-5293d0a33351", "address": "fa:16:3e:30:1a:62", "network": {"id": "a7915935-f80a-4779-9b05-14618ff0da4f", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1835649106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "275751af13214e1ea84d4bd8b70097b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "205fb402-8eaf-4b61-8f57-8f216024179a", "external-id": "nsx-vlan-transportzone-78", "segmentation_id": 78, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6cdea66-2e", "ovs_interfaceid": "d6cdea66-2edf-49fa-9c50-5293d0a33351", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1771.262479] env[62510]: DEBUG nova.policy [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8bab2df0cdfb4853879515120e93ce25', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'de0d125bba6242d3b9614402098efc1f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1771.540963] env[62510]: DEBUG nova.compute.manager [req-bfc57ab3-6707-4bdf-8ffa-bb6b4d1aff4f req-f79b9a19-119a-457a-8ee1-c736a0c72854 service nova] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Received event network-vif-plugged-d3047f95-b766-4344-bc0c-ad2f1b9f55fd {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1771.541275] env[62510]: DEBUG oslo_concurrency.lockutils [req-bfc57ab3-6707-4bdf-8ffa-bb6b4d1aff4f req-f79b9a19-119a-457a-8ee1-c736a0c72854 service nova] Acquiring lock "2f7b02e8-f658-448f-b6e6-9bfa94c74da4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1771.541494] env[62510]: DEBUG oslo_concurrency.lockutils [req-bfc57ab3-6707-4bdf-8ffa-bb6b4d1aff4f req-f79b9a19-119a-457a-8ee1-c736a0c72854 service nova] Lock "2f7b02e8-f658-448f-b6e6-9bfa94c74da4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1771.541684] 
env[62510]: DEBUG oslo_concurrency.lockutils [req-bfc57ab3-6707-4bdf-8ffa-bb6b4d1aff4f req-f79b9a19-119a-457a-8ee1-c736a0c72854 service nova] Lock "2f7b02e8-f658-448f-b6e6-9bfa94c74da4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1771.541861] env[62510]: DEBUG nova.compute.manager [req-bfc57ab3-6707-4bdf-8ffa-bb6b4d1aff4f req-f79b9a19-119a-457a-8ee1-c736a0c72854 service nova] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] No waiting events found dispatching network-vif-plugged-d3047f95-b766-4344-bc0c-ad2f1b9f55fd {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1771.542042] env[62510]: WARNING nova.compute.manager [req-bfc57ab3-6707-4bdf-8ffa-bb6b4d1aff4f req-f79b9a19-119a-457a-8ee1-c736a0c72854 service nova] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Received unexpected event network-vif-plugged-d3047f95-b766-4344-bc0c-ad2f1b9f55fd for instance with vm_state building and task_state spawning. [ 1771.542209] env[62510]: DEBUG nova.compute.manager [req-bfc57ab3-6707-4bdf-8ffa-bb6b4d1aff4f req-f79b9a19-119a-457a-8ee1-c736a0c72854 service nova] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Received event network-changed-d3047f95-b766-4344-bc0c-ad2f1b9f55fd {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1771.542376] env[62510]: DEBUG nova.compute.manager [req-bfc57ab3-6707-4bdf-8ffa-bb6b4d1aff4f req-f79b9a19-119a-457a-8ee1-c736a0c72854 service nova] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Refreshing instance network info cache due to event network-changed-d3047f95-b766-4344-bc0c-ad2f1b9f55fd. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1771.542561] env[62510]: DEBUG oslo_concurrency.lockutils [req-bfc57ab3-6707-4bdf-8ffa-bb6b4d1aff4f req-f79b9a19-119a-457a-8ee1-c736a0c72854 service nova] Acquiring lock "refresh_cache-2f7b02e8-f658-448f-b6e6-9bfa94c74da4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1771.542716] env[62510]: DEBUG oslo_concurrency.lockutils [req-bfc57ab3-6707-4bdf-8ffa-bb6b4d1aff4f req-f79b9a19-119a-457a-8ee1-c736a0c72854 service nova] Acquired lock "refresh_cache-2f7b02e8-f658-448f-b6e6-9bfa94c74da4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1771.542851] env[62510]: DEBUG nova.network.neutron [req-bfc57ab3-6707-4bdf-8ffa-bb6b4d1aff4f req-f79b9a19-119a-457a-8ee1-c736a0c72854 service nova] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Refreshing network info cache for port d3047f95-b766-4344-bc0c-ad2f1b9f55fd {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1771.592625] env[62510]: DEBUG oslo_vmware.api [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1769140, 'name': Rename_Task, 'duration_secs': 0.245789} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1771.592862] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1771.593286] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d6d31c25-2df4-4d98-a0ee-0612d296313f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.604811] env[62510]: DEBUG oslo_vmware.api [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for the task: (returnval){ [ 1771.604811] env[62510]: value = "task-1769142" [ 1771.604811] env[62510]: _type = "Task" [ 1771.604811] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1771.629474] env[62510]: DEBUG oslo_vmware.api [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1769142, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.647131] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquiring lock "refresh_cache-2f7b02e8-f658-448f-b6e6-9bfa94c74da4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1771.659712] env[62510]: DEBUG nova.compute.manager [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1771.715120] env[62510]: DEBUG oslo_vmware.api [None req-0a8190a5-cf1f-404d-b054-1f1da4b2c071 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769141, 'name': PowerOffVM_Task, 'duration_secs': 0.207719} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1771.720021] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a8190a5-cf1f-404d-b054-1f1da4b2c071 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1771.720021] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0a8190a5-cf1f-404d-b054-1f1da4b2c071 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1771.720021] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-19c5a67a-eb53-47d4-a150-a87f44db3cfe {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.730901] env[62510]: DEBUG oslo_vmware.api [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52ae75b2-c59d-0680-b4ac-7f991a1ff8b5, 'name': SearchDatastore_Task, 'duration_secs': 0.03294} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1771.731216] env[62510]: DEBUG oslo_concurrency.lockutils [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1771.731456] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1771.731677] env[62510]: DEBUG oslo_concurrency.lockutils [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1771.731823] env[62510]: DEBUG oslo_concurrency.lockutils [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1771.731998] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 
tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1771.732503] env[62510]: DEBUG oslo_concurrency.lockutils [req-a150b7c1-8dcb-47c0-8b14-7d900c6d0c13 req-a86de904-3cc0-43a6-bdee-7360309cabb1 service nova] Releasing lock "refresh_cache-dabc046f-10f5-43d8-90f8-507dcb4d0144" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1771.732824] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5fe176a3-1f2e-4f15-8338-2ff5081280d2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.743073] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1771.743280] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1771.744048] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4fbe27d7-058f-4a01-8dd6-84330bb1c01a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.753594] env[62510]: DEBUG oslo_vmware.api [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Waiting for the task: (returnval){ [ 1771.753594] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5282d59b-72ba-e12e-6753-5e2c1508ac88" [ 1771.753594] env[62510]: _type = "Task" [ 1771.753594] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1771.762660] env[62510]: DEBUG oslo_vmware.api [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5282d59b-72ba-e12e-6753-5e2c1508ac88, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.777882] env[62510]: DEBUG nova.network.neutron [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Successfully created port: a0b771a0-7aa2-49f0-9945-9956c4260b99 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1771.891343] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0a8190a5-cf1f-404d-b054-1f1da4b2c071 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1771.891560] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0a8190a5-cf1f-404d-b054-1f1da4b2c071 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1771.891772] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a8190a5-cf1f-404d-b054-1f1da4b2c071 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Deleting the datastore file [datastore1] 83fa0d32-18ee-401d-af0b-a0adb538e5f4 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1771.892112] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c817dd1c-89d4-4fb0-8c13-a91e344e3fcd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.927781] env[62510]: DEBUG oslo_vmware.api [None req-0a8190a5-cf1f-404d-b054-1f1da4b2c071 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1771.927781] env[62510]: value = "task-1769144" [ 1771.927781] env[62510]: _type = "Task" [ 1771.927781] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1771.939043] env[62510]: DEBUG oslo_vmware.api [None req-0a8190a5-cf1f-404d-b054-1f1da4b2c071 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769144, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.064501] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47762fa6-b17c-4607-86ee-79dd0949a6fd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.074329] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32149bd9-034e-4c6b-8c5e-c99635865401 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.109200] env[62510]: DEBUG nova.network.neutron [req-bfc57ab3-6707-4bdf-8ffa-bb6b4d1aff4f req-f79b9a19-119a-457a-8ee1-c736a0c72854 service nova] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1772.115809] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-133cda0f-9d4f-4016-b7d8-925bff2983be {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.117646] env[62510]: DEBUG oslo_concurrency.lockutils [None req-02edb596-2058-4757-92c6-7ae260418478 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "77f485ae-9c4c-424e-8bac-6d023e428767" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1772.117889] env[62510]: DEBUG oslo_concurrency.lockutils [None req-02edb596-2058-4757-92c6-7ae260418478 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "77f485ae-9c4c-424e-8bac-6d023e428767" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1772.118089] env[62510]: DEBUG nova.compute.manager [None req-02edb596-2058-4757-92c6-7ae260418478 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Going to confirm migration 4 {{(pid=62510) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5157}} [ 1772.134875] env[62510]: DEBUG oslo_vmware.api [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1769142, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.138775] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5266c0f-1b77-4cfb-a286-3da9fa93d9f7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.155642] env[62510]: DEBUG nova.compute.provider_tree [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1772.266964] env[62510]: DEBUG oslo_vmware.api [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5282d59b-72ba-e12e-6753-5e2c1508ac88, 'name': SearchDatastore_Task, 'duration_secs': 0.021857} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1772.267891] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1fc94b06-e082-4cb4-ad0c-552c2b06be2b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.274509] env[62510]: DEBUG oslo_vmware.api [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Waiting for the task: (returnval){ [ 1772.274509] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]529532ca-4927-55d4-2dff-0ead5918a5ca" [ 1772.274509] env[62510]: _type = "Task" [ 1772.274509] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1772.285438] env[62510]: DEBUG oslo_vmware.api [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]529532ca-4927-55d4-2dff-0ead5918a5ca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.310950] env[62510]: DEBUG nova.network.neutron [req-bfc57ab3-6707-4bdf-8ffa-bb6b4d1aff4f req-f79b9a19-119a-457a-8ee1-c736a0c72854 service nova] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1772.439292] env[62510]: DEBUG oslo_vmware.api [None req-0a8190a5-cf1f-404d-b054-1f1da4b2c071 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769144, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.618652] env[62510]: DEBUG oslo_vmware.api [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1769142, 'name': PowerOnVM_Task, 'duration_secs': 0.742303} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1772.618921] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1772.661440] env[62510]: DEBUG nova.scheduler.client.report [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1772.669073] env[62510]: DEBUG oslo_concurrency.lockutils [None req-02edb596-2058-4757-92c6-7ae260418478 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "refresh_cache-77f485ae-9c4c-424e-8bac-6d023e428767" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1772.669346] env[62510]: DEBUG oslo_concurrency.lockutils [None req-02edb596-2058-4757-92c6-7ae260418478 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquired lock "refresh_cache-77f485ae-9c4c-424e-8bac-6d023e428767" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1772.669654] env[62510]: DEBUG nova.network.neutron [None req-02edb596-2058-4757-92c6-7ae260418478 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1772.669758] env[62510]: DEBUG nova.objects.instance [None req-02edb596-2058-4757-92c6-7ae260418478 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lazy-loading 'info_cache' on Instance uuid 77f485ae-9c4c-424e-8bac-6d023e428767 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1772.672054] env[62510]: DEBUG nova.compute.manager [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1772.731571] env[62510]: DEBUG nova.compute.manager [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1772.732516] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6055155c-da90-4d3a-af66-8a3d14d527b8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.753138] env[62510]: DEBUG nova.virt.hardware [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1772.753400] env[62510]: DEBUG nova.virt.hardware [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1772.753557] env[62510]: DEBUG nova.virt.hardware [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1772.753920] env[62510]: DEBUG nova.virt.hardware [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1772.753920] env[62510]: DEBUG nova.virt.hardware [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1772.754105] env[62510]: DEBUG nova.virt.hardware [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1772.754234] env[62510]: DEBUG nova.virt.hardware [None req-2697e290-f15b-41e3-a927-5985ef675a69 
tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1772.754395] env[62510]: DEBUG nova.virt.hardware [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1772.754667] env[62510]: DEBUG nova.virt.hardware [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1772.754763] env[62510]: DEBUG nova.virt.hardware [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1772.754908] env[62510]: DEBUG nova.virt.hardware [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1772.755751] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1315dc9e-a864-43c6-b994-78b0c77ab4cc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.764951] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61774f4b-8dff-4612-a61b-f81bc6547536 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.789190] env[62510]: DEBUG oslo_vmware.api [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]529532ca-4927-55d4-2dff-0ead5918a5ca, 'name': SearchDatastore_Task, 'duration_secs': 0.084763} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1772.789458] env[62510]: DEBUG oslo_concurrency.lockutils [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1772.789728] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] dabc046f-10f5-43d8-90f8-507dcb4d0144/dabc046f-10f5-43d8-90f8-507dcb4d0144.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1772.789997] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a02ac317-bcfc-42e4-b5b3-edb083908430 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.797918] env[62510]: DEBUG oslo_vmware.api [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Waiting for the task: (returnval){ [ 1772.797918] env[62510]: value = "task-1769145" [ 1772.797918] env[62510]: _type = "Task" [ 1772.797918] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1772.806565] env[62510]: DEBUG oslo_vmware.api [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Task: {'id': task-1769145, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.814220] env[62510]: DEBUG oslo_concurrency.lockutils [req-bfc57ab3-6707-4bdf-8ffa-bb6b4d1aff4f req-f79b9a19-119a-457a-8ee1-c736a0c72854 service nova] Releasing lock "refresh_cache-2f7b02e8-f658-448f-b6e6-9bfa94c74da4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1772.814574] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquired lock "refresh_cache-2f7b02e8-f658-448f-b6e6-9bfa94c74da4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1772.814741] env[62510]: DEBUG nova.network.neutron [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1772.939871] env[62510]: DEBUG oslo_vmware.api [None req-0a8190a5-cf1f-404d-b054-1f1da4b2c071 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769144, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.713637} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1772.940954] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a8190a5-cf1f-404d-b054-1f1da4b2c071 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1772.940954] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0a8190a5-cf1f-404d-b054-1f1da4b2c071 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1772.940954] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0a8190a5-cf1f-404d-b054-1f1da4b2c071 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1772.941174] env[62510]: INFO nova.compute.manager [None req-0a8190a5-cf1f-404d-b054-1f1da4b2c071 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Took 1.77 seconds to destroy the instance on the hypervisor. [ 1772.941317] env[62510]: DEBUG oslo.service.loopingcall [None req-0a8190a5-cf1f-404d-b054-1f1da4b2c071 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1772.941452] env[62510]: DEBUG nova.compute.manager [-] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1772.941550] env[62510]: DEBUG nova.network.neutron [-] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1773.167518] env[62510]: DEBUG oslo_concurrency.lockutils [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.530s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1773.168183] env[62510]: DEBUG nova.compute.manager [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1773.171266] env[62510]: DEBUG oslo_concurrency.lockutils [None req-be6d7cfe-f9a9-4e32-a95a-84d7307e5eb0 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.616s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1773.171266] env[62510]: DEBUG nova.objects.instance [None req-be6d7cfe-f9a9-4e32-a95a-84d7307e5eb0 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lazy-loading 'resources' on Instance uuid 144052ab-e3e7-401f-9edb-d8088780e468 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1773.254297] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3c2d5642-e9df-4fb1-8104-750f663ebb6f tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Lock "e7daad63-c802-4a86-bead-7e849064ed61" "released" by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" :: held 38.328s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1773.308950] env[62510]: DEBUG oslo_vmware.api [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Task: {'id': task-1769145, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.356143] env[62510]: DEBUG nova.network.neutron [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Instance cache missing network info. 
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1773.515540] env[62510]: DEBUG nova.network.neutron [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Updating instance_info_cache with network_info: [{"id": "d3047f95-b766-4344-bc0c-ad2f1b9f55fd", "address": "fa:16:3e:97:f9:1a", "network": {"id": "3b8d6085-89b4-4ce1-b2d3-a23177f0eb79", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-951886226-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de0d125bba6242d3b9614402098efc1f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3047f95-b7", "ovs_interfaceid": "d3047f95-b766-4344-bc0c-ad2f1b9f55fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1773.676531] env[62510]: DEBUG nova.compute.utils [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1773.684710] env[62510]: DEBUG nova.compute.manager [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1773.684710] env[62510]: DEBUG nova.network.neutron [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1773.692119] env[62510]: DEBUG nova.compute.manager [req-77453110-9f84-4d08-b142-58b7cfc15921 req-db02935d-478d-4633-9a0e-5fe33238bcf2 service nova] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Received event network-vif-deleted-d6ee81d1-3abc-4d5e-a8ca-658407cbd553 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1773.692616] env[62510]: INFO nova.compute.manager [req-77453110-9f84-4d08-b142-58b7cfc15921 req-db02935d-478d-4633-9a0e-5fe33238bcf2 service nova] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Neutron deleted interface d6ee81d1-3abc-4d5e-a8ca-658407cbd553; detaching it from the instance and deleting it from the info cache [ 1773.692982] env[62510]: DEBUG nova.network.neutron [req-77453110-9f84-4d08-b142-58b7cfc15921 req-db02935d-478d-4633-9a0e-5fe33238bcf2 service nova] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1773.814541] env[62510]: DEBUG nova.policy [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aa4c3fa8aa6141558d7eb16e0e726b96', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '68579c8354b4431e8ec51575cda77325', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1773.824665] env[62510]: DEBUG oslo_vmware.api [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Task: {'id': task-1769145, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.971983] env[62510]: DEBUG nova.network.neutron [None req-02edb596-2058-4757-92c6-7ae260418478 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Updating instance_info_cache with network_info: [{"id": "9174aa7f-56a1-4625-be49-9a7f645e961b", "address": "fa:16:3e:c3:e4:7b", "network": {"id": "bf59f5d9-5154-4120-9edd-03529b552382", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2003015829-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e144c0bd2d124193a65ad53de8c43039", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9174aa7f-56", "ovs_interfaceid": "9174aa7f-56a1-4625-be49-9a7f645e961b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1774.017494] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Releasing lock "refresh_cache-2f7b02e8-f658-448f-b6e6-9bfa94c74da4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1774.017866] env[62510]: DEBUG nova.compute.manager [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Instance network_info: |[{"id": "d3047f95-b766-4344-bc0c-ad2f1b9f55fd", "address": "fa:16:3e:97:f9:1a", "network": {"id": "3b8d6085-89b4-4ce1-b2d3-a23177f0eb79", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-951886226-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de0d125bba6242d3b9614402098efc1f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3047f95-b7", "ovs_interfaceid": "d3047f95-b766-4344-bc0c-ad2f1b9f55fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1774.018618] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:97:f9:1a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fe38bb7e-8bcb-419d-868f-0dc105c69651', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd3047f95-b766-4344-bc0c-ad2f1b9f55fd', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1774.030162] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Creating folder: Project (de0d125bba6242d3b9614402098efc1f). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1774.033052] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d4dd7b6c-b203-45d1-80a8-e782f0188ac4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.051387] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Created folder: Project (de0d125bba6242d3b9614402098efc1f) in parent group-v367197. [ 1774.051641] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Creating folder: Instances. Parent ref: group-v367415. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1774.051930] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-351122a3-621b-4c56-adc0-3fac1cf2719f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.069853] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Created folder: Instances in parent group-v367415. [ 1774.070015] env[62510]: DEBUG oslo.service.loopingcall [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1774.070266] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1774.074024] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-76ec1076-7d94-4315-bbf8-368fa7d2b26f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.097463] env[62510]: DEBUG nova.network.neutron [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Successfully updated port: a0b771a0-7aa2-49f0-9945-9956c4260b99 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1774.109020] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1774.109020] env[62510]: value = "task-1769148" [ 1774.109020] env[62510]: _type = "Task" [ 1774.109020] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1774.116291] env[62510]: DEBUG nova.network.neutron [-] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1774.126921] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769148, 'name': CreateVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.180480] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77ab9e77-4db9-4aa8-a8f0-a70ede7afddb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.183849] env[62510]: DEBUG nova.compute.manager [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Start building block device mappings for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1774.192968] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56428f9b-3ebc-4ec7-b084-398fe9d7b705 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.197612] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3a3777d0-3c3f-4983-b750-b107429647e0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.251077] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f64f8b22-12da-4f21-a88a-4fa21b131960 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.256672] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81304eb6-6d08-4eb0-bf80-341d7d24f98d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.276390] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12d29ce0-2eec-4dc0-a423-7b6d305892be {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.292663] env[62510]: DEBUG nova.compute.manager [req-77453110-9f84-4d08-b142-58b7cfc15921 req-db02935d-478d-4633-9a0e-5fe33238bcf2 service nova] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Detach interface failed, port_id=d6ee81d1-3abc-4d5e-a8ca-658407cbd553, reason: Instance 83fa0d32-18ee-401d-af0b-a0adb538e5f4 could not be found. {{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1774.301449] env[62510]: DEBUG nova.compute.provider_tree [None req-be6d7cfe-f9a9-4e32-a95a-84d7307e5eb0 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1774.312894] env[62510]: DEBUG oslo_vmware.api [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Task: {'id': task-1769145, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.482384] env[62510]: DEBUG oslo_concurrency.lockutils [None req-02edb596-2058-4757-92c6-7ae260418478 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Releasing lock "refresh_cache-77f485ae-9c4c-424e-8bac-6d023e428767" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1774.482684] env[62510]: DEBUG nova.objects.instance [None req-02edb596-2058-4757-92c6-7ae260418478 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lazy-loading 'migration_context' on Instance uuid 77f485ae-9c4c-424e-8bac-6d023e428767 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1774.604353] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquiring lock "refresh_cache-8a230335-6388-45fb-a29e-9e63ddb4d5f2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1774.604353] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquired lock "refresh_cache-8a230335-6388-45fb-a29e-9e63ddb4d5f2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1774.604353] env[62510]: DEBUG nova.network.neutron [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1774.621871] env[62510]: INFO nova.compute.manager [-] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Took 1.68 seconds to deallocate network for instance. [ 1774.622542] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769148, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.659155] env[62510]: DEBUG nova.network.neutron [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Successfully created port: c380698b-8ec4-4110-a0c8-89549fc49f68 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1774.806762] env[62510]: DEBUG nova.scheduler.client.report [None req-be6d7cfe-f9a9-4e32-a95a-84d7307e5eb0 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1774.815226] env[62510]: DEBUG oslo_vmware.api [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Task: {'id': task-1769145, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.526973} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1774.815768] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] dabc046f-10f5-43d8-90f8-507dcb4d0144/dabc046f-10f5-43d8-90f8-507dcb4d0144.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1774.815987] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1774.816260] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-34b2cd21-2158-4ab6-a271-5ba54f787b5f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.824735] env[62510]: DEBUG oslo_vmware.api [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Waiting for the task: (returnval){ [ 1774.824735] env[62510]: value = "task-1769149" [ 1774.824735] env[62510]: _type = "Task" [ 1774.824735] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1774.835571] env[62510]: DEBUG oslo_vmware.api [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Task: {'id': task-1769149, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.989885] env[62510]: DEBUG nova.objects.base [None req-02edb596-2058-4757-92c6-7ae260418478 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Object Instance<77f485ae-9c4c-424e-8bac-6d023e428767> lazy-loaded attributes: info_cache,migration_context {{(pid=62510) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1774.991217] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9172b11-6877-42b0-89cd-09d7c9a2a3c8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.011258] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1511b35b-4370-41bf-805f-82b2d3e6286e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.018128] env[62510]: DEBUG oslo_vmware.api [None req-02edb596-2058-4757-92c6-7ae260418478 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1775.018128] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52d00353-b754-6ffc-a25d-6ded6cb1e3f6" [ 1775.018128] env[62510]: _type = "Task" [ 1775.018128] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.027339] env[62510]: DEBUG oslo_vmware.api [None req-02edb596-2058-4757-92c6-7ae260418478 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52d00353-b754-6ffc-a25d-6ded6cb1e3f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.120543] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769148, 'name': CreateVM_Task, 'duration_secs': 0.846344} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.121190] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1775.121933] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1775.122224] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1775.122577] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1775.122803] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d909e3a1-492a-4407-bbdd-68571a9cd454 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.128606] env[62510]: DEBUG oslo_vmware.api [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1775.128606] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5279cb0a-5761-d32f-7999-b289f1826ff0" [ 1775.128606] env[62510]: _type = "Task" [ 1775.128606] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.134103] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0a8190a5-cf1f-404d-b054-1f1da4b2c071 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1775.137389] env[62510]: DEBUG oslo_vmware.api [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5279cb0a-5761-d32f-7999-b289f1826ff0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.178533] env[62510]: DEBUG nova.network.neutron [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1775.196546] env[62510]: DEBUG nova.compute.manager [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1775.227623] env[62510]: DEBUG nova.virt.hardware [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1775.227623] env[62510]: DEBUG nova.virt.hardware [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1775.227935] env[62510]: DEBUG nova.virt.hardware [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1775.227991] env[62510]: DEBUG nova.virt.hardware [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1775.228122] env[62510]: DEBUG nova.virt.hardware [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1775.228272] env[62510]: DEBUG nova.virt.hardware [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Chose sockets=0, cores=0, threads=0; limits were 
sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1775.228480] env[62510]: DEBUG nova.virt.hardware [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1775.228636] env[62510]: DEBUG nova.virt.hardware [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1775.228799] env[62510]: DEBUG nova.virt.hardware [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1775.228961] env[62510]: DEBUG nova.virt.hardware [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1775.229154] env[62510]: DEBUG nova.virt.hardware [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1775.230038] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-680ab1fc-e42a-41ef-87e5-4d01293e4060 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.239698] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5169b2fd-5a95-4ca6-80d5-22e731b61fba {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.317467] env[62510]: DEBUG oslo_concurrency.lockutils [None req-be6d7cfe-f9a9-4e32-a95a-84d7307e5eb0 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.146s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1775.319787] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.649s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1775.320953] env[62510]: DEBUG nova.objects.instance [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 
tempest-DeleteServersTestJSON-1994223681-project-member] Lazy-loading 'resources' on Instance uuid 774ea198-c933-449a-8380-2e4cc9327389 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1775.338723] env[62510]: DEBUG oslo_vmware.api [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Task: {'id': task-1769149, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084693} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.339031] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1775.339926] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3e30d3d-624e-444d-bcbf-ff760ef9ca36 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.351803] env[62510]: INFO nova.scheduler.client.report [None req-be6d7cfe-f9a9-4e32-a95a-84d7307e5eb0 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Deleted allocations for instance 144052ab-e3e7-401f-9edb-d8088780e468 [ 1775.378518] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Reconfiguring VM instance instance-00000051 to attach disk [datastore1] dabc046f-10f5-43d8-90f8-507dcb4d0144/dabc046f-10f5-43d8-90f8-507dcb4d0144.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1775.384503] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0298534e-e6b1-43a2-8646-b1f924974305 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.410893] env[62510]: DEBUG oslo_vmware.api [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Waiting for the task: (returnval){ [ 1775.410893] env[62510]: value = "task-1769150" [ 1775.410893] env[62510]: _type = "Task" [ 1775.410893] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.418325] env[62510]: DEBUG oslo_vmware.api [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Task: {'id': task-1769150, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.469591] env[62510]: DEBUG nova.network.neutron [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Updating instance_info_cache with network_info: [{"id": "a0b771a0-7aa2-49f0-9945-9956c4260b99", "address": "fa:16:3e:f1:7e:dd", "network": {"id": "3b8d6085-89b4-4ce1-b2d3-a23177f0eb79", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-951886226-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de0d125bba6242d3b9614402098efc1f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0b771a0-7a", "ovs_interfaceid": "a0b771a0-7aa2-49f0-9945-9956c4260b99", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1775.542971] env[62510]: DEBUG oslo_vmware.api [None req-02edb596-2058-4757-92c6-7ae260418478 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52d00353-b754-6ffc-a25d-6ded6cb1e3f6, 'name': SearchDatastore_Task, 'duration_secs': 0.011025} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.543365] env[62510]: DEBUG oslo_concurrency.lockutils [None req-02edb596-2058-4757-92c6-7ae260418478 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1775.641090] env[62510]: DEBUG oslo_vmware.api [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5279cb0a-5761-d32f-7999-b289f1826ff0, 'name': SearchDatastore_Task, 'duration_secs': 0.026302} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.641446] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1775.641682] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1775.641927] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1775.642369] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1775.642369] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1775.642911] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8a630199-88e9-4731-bc43-1da23a1a575e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.654081] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1775.654325] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1775.655369] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54049c4c-0ac1-4f8e-88f7-8f2ba16e3f6d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.661738] env[62510]: DEBUG oslo_vmware.api [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1775.661738] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]529b4ece-f6a2-3bdf-c3c9-356aea8a247b" [ 1775.661738] env[62510]: _type = "Task" [ 1775.661738] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.670709] env[62510]: DEBUG oslo_vmware.api [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]529b4ece-f6a2-3bdf-c3c9-356aea8a247b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.783752] env[62510]: DEBUG nova.compute.manager [req-0f5361a1-e85d-4f04-9d16-ca5114c783c1 req-5d6d86a8-91d8-4203-a2ab-5f23008397ea service nova] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Received event network-vif-plugged-a0b771a0-7aa2-49f0-9945-9956c4260b99 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1775.783992] env[62510]: DEBUG oslo_concurrency.lockutils [req-0f5361a1-e85d-4f04-9d16-ca5114c783c1 req-5d6d86a8-91d8-4203-a2ab-5f23008397ea service nova] Acquiring lock "8a230335-6388-45fb-a29e-9e63ddb4d5f2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1775.784196] env[62510]: DEBUG oslo_concurrency.lockutils [req-0f5361a1-e85d-4f04-9d16-ca5114c783c1 req-5d6d86a8-91d8-4203-a2ab-5f23008397ea service nova] Lock "8a230335-6388-45fb-a29e-9e63ddb4d5f2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1775.784583] env[62510]: DEBUG oslo_concurrency.lockutils [req-0f5361a1-e85d-4f04-9d16-ca5114c783c1 req-5d6d86a8-91d8-4203-a2ab-5f23008397ea service nova] Lock "8a230335-6388-45fb-a29e-9e63ddb4d5f2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1775.784583] env[62510]: DEBUG nova.compute.manager [req-0f5361a1-e85d-4f04-9d16-ca5114c783c1 req-5d6d86a8-91d8-4203-a2ab-5f23008397ea service nova] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] No waiting events found dispatching network-vif-plugged-a0b771a0-7aa2-49f0-9945-9956c4260b99 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1775.784708] env[62510]: WARNING nova.compute.manager [req-0f5361a1-e85d-4f04-9d16-ca5114c783c1 req-5d6d86a8-91d8-4203-a2ab-5f23008397ea service nova] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Received unexpected event 
network-vif-plugged-a0b771a0-7aa2-49f0-9945-9956c4260b99 for instance with vm_state building and task_state spawning. [ 1775.784856] env[62510]: DEBUG nova.compute.manager [req-0f5361a1-e85d-4f04-9d16-ca5114c783c1 req-5d6d86a8-91d8-4203-a2ab-5f23008397ea service nova] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Received event network-changed-a0b771a0-7aa2-49f0-9945-9956c4260b99 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1775.785017] env[62510]: DEBUG nova.compute.manager [req-0f5361a1-e85d-4f04-9d16-ca5114c783c1 req-5d6d86a8-91d8-4203-a2ab-5f23008397ea service nova] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Refreshing instance network info cache due to event network-changed-a0b771a0-7aa2-49f0-9945-9956c4260b99. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1775.785187] env[62510]: DEBUG oslo_concurrency.lockutils [req-0f5361a1-e85d-4f04-9d16-ca5114c783c1 req-5d6d86a8-91d8-4203-a2ab-5f23008397ea service nova] Acquiring lock "refresh_cache-8a230335-6388-45fb-a29e-9e63ddb4d5f2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1775.904086] env[62510]: DEBUG oslo_concurrency.lockutils [None req-be6d7cfe-f9a9-4e32-a95a-84d7307e5eb0 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "144052ab-e3e7-401f-9edb-d8088780e468" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.216s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1775.920177] env[62510]: DEBUG oslo_vmware.api [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Task: {'id': task-1769150, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.973256] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Releasing lock "refresh_cache-8a230335-6388-45fb-a29e-9e63ddb4d5f2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1775.973666] env[62510]: DEBUG nova.compute.manager [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Instance network_info: |[{"id": "a0b771a0-7aa2-49f0-9945-9956c4260b99", "address": "fa:16:3e:f1:7e:dd", "network": {"id": "3b8d6085-89b4-4ce1-b2d3-a23177f0eb79", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-951886226-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de0d125bba6242d3b9614402098efc1f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0b771a0-7a", "ovs_interfaceid": "a0b771a0-7aa2-49f0-9945-9956c4260b99", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1775.974016] env[62510]: DEBUG oslo_concurrency.lockutils [req-0f5361a1-e85d-4f04-9d16-ca5114c783c1 req-5d6d86a8-91d8-4203-a2ab-5f23008397ea service nova] Acquired lock "refresh_cache-8a230335-6388-45fb-a29e-9e63ddb4d5f2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1775.974188] env[62510]: DEBUG nova.network.neutron [req-0f5361a1-e85d-4f04-9d16-ca5114c783c1 req-5d6d86a8-91d8-4203-a2ab-5f23008397ea service nova] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Refreshing network info cache for port a0b771a0-7aa2-49f0-9945-9956c4260b99 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1775.977910] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f1:7e:dd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fe38bb7e-8bcb-419d-868f-0dc105c69651', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a0b771a0-7aa2-49f0-9945-9956c4260b99', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1775.985662] env[62510]: DEBUG oslo.service.loopingcall [None req-2697e290-f15b-41e3-a927-5985ef675a69 
tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1775.988448] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1775.988899] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-083158c4-bdce-4428-a7f5-0610a30a54a8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.015977] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1776.015977] env[62510]: value = "task-1769151" [ 1776.015977] env[62510]: _type = "Task" [ 1776.015977] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1776.030573] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769151, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.087294] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f6e5fd7-978b-4f10-8af0-3eff8754c46c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.100251] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-cee90812-7105-4b20-9ed7-4dec4def38dc tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Suspending the VM {{(pid=62510) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1776.100251] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-5a073e38-fbd1-4ba0-bd73-973f0a6a3353 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.107286] env[62510]: DEBUG oslo_vmware.api [None req-cee90812-7105-4b20-9ed7-4dec4def38dc tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for the task: (returnval){ [ 1776.107286] env[62510]: value = "task-1769152" [ 1776.107286] env[62510]: _type = "Task" [ 1776.107286] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1776.116502] env[62510]: DEBUG oslo_vmware.api [None req-cee90812-7105-4b20-9ed7-4dec4def38dc tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1769152, 'name': SuspendVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.172466] env[62510]: DEBUG oslo_vmware.api [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]529b4ece-f6a2-3bdf-c3c9-356aea8a247b, 'name': SearchDatastore_Task, 'duration_secs': 0.016498} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1776.176206] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bcfa2251-06bf-4329-915e-b774dfa85e47 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.183201] env[62510]: DEBUG oslo_vmware.api [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1776.183201] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52cfcc1d-fe47-7475-8b69-dd2e36336950" [ 1776.183201] env[62510]: _type = "Task" [ 1776.183201] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1776.195364] env[62510]: DEBUG oslo_vmware.api [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52cfcc1d-fe47-7475-8b69-dd2e36336950, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.200431] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae8c4d47-e9a3-4b5b-a45b-252928b56c49 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.208610] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9fb68a6-5457-43ad-b81e-603d113ea242 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.240454] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec7b886e-0a94-476e-aa21-d12f3c5eb953 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.249809] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ee970ea-40c0-4593-97e9-cdadafc03acc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.266611] env[62510]: DEBUG nova.compute.provider_tree [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1776.417312] env[62510]: DEBUG nova.network.neutron [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Successfully updated port: c380698b-8ec4-4110-a0c8-89549fc49f68 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1776.421994] env[62510]: DEBUG oslo_vmware.api [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Task: {'id': task-1769150, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.543643] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769151, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.621650] env[62510]: DEBUG oslo_vmware.api [None req-cee90812-7105-4b20-9ed7-4dec4def38dc tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1769152, 'name': SuspendVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.650401] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6e737d9a-cba6-44ed-a9a5-143e811f6e2c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "interface-241d842d-3dd5-4ac2-a18a-12b9c9fbd340-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1776.650699] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6e737d9a-cba6-44ed-a9a5-143e811f6e2c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "interface-241d842d-3dd5-4ac2-a18a-12b9c9fbd340-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1776.651258] env[62510]: DEBUG nova.objects.instance [None req-6e737d9a-cba6-44ed-a9a5-143e811f6e2c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lazy-loading 'flavor' on Instance uuid 241d842d-3dd5-4ac2-a18a-12b9c9fbd340 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1776.701059] env[62510]: DEBUG oslo_vmware.api [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52cfcc1d-fe47-7475-8b69-dd2e36336950, 'name': SearchDatastore_Task, 'duration_secs': 0.019578} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1776.701397] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1776.701665] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 2f7b02e8-f658-448f-b6e6-9bfa94c74da4/2f7b02e8-f658-448f-b6e6-9bfa94c74da4.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1776.701944] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-676a3c2d-88b4-44d1-b334-9b990569ce49 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.711766] env[62510]: DEBUG oslo_vmware.api [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1776.711766] env[62510]: value = "task-1769153" [ 1776.711766] env[62510]: _type = "Task" [ 1776.711766] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1776.722907] env[62510]: DEBUG oslo_vmware.api [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769153, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.744289] env[62510]: DEBUG nova.network.neutron [req-0f5361a1-e85d-4f04-9d16-ca5114c783c1 req-5d6d86a8-91d8-4203-a2ab-5f23008397ea service nova] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Updated VIF entry in instance network info cache for port a0b771a0-7aa2-49f0-9945-9956c4260b99. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1776.744854] env[62510]: DEBUG nova.network.neutron [req-0f5361a1-e85d-4f04-9d16-ca5114c783c1 req-5d6d86a8-91d8-4203-a2ab-5f23008397ea service nova] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Updating instance_info_cache with network_info: [{"id": "a0b771a0-7aa2-49f0-9945-9956c4260b99", "address": "fa:16:3e:f1:7e:dd", "network": {"id": "3b8d6085-89b4-4ce1-b2d3-a23177f0eb79", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-951886226-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de0d125bba6242d3b9614402098efc1f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0b771a0-7a", "ovs_interfaceid": "a0b771a0-7aa2-49f0-9945-9956c4260b99", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1776.770729] env[62510]: DEBUG nova.scheduler.client.report [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1776.923486] env[62510]: DEBUG oslo_concurrency.lockutils [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Acquiring lock "refresh_cache-9373089f-dbd4-4ac9-8736-e4c929fe6fb0" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1776.923625] env[62510]: DEBUG oslo_concurrency.lockutils [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Acquired lock "refresh_cache-9373089f-dbd4-4ac9-8736-e4c929fe6fb0" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1776.923699] env[62510]: DEBUG nova.network.neutron [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Building network info cache for instance {{(pid=62510) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 1776.924797] env[62510]: DEBUG oslo_vmware.api [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Task: {'id': task-1769150, 'name': ReconfigVM_Task, 'duration_secs': 1.360986} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1776.925437] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Reconfigured VM instance instance-00000051 to attach disk [datastore1] dabc046f-10f5-43d8-90f8-507dcb4d0144/dabc046f-10f5-43d8-90f8-507dcb4d0144.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1776.925750] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5c7e72d8-23fa-43b2-bf61-3a83bcfa2c7c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.935421] env[62510]: DEBUG oslo_vmware.api [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Waiting for the task: (returnval){ [ 1776.935421] env[62510]: value = "task-1769154" [ 1776.935421] env[62510]: _type = "Task" [ 1776.935421] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1776.947515] env[62510]: DEBUG oslo_vmware.api [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Task: {'id': task-1769154, 'name': Rename_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.032399] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769151, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.120406] env[62510]: DEBUG oslo_vmware.api [None req-cee90812-7105-4b20-9ed7-4dec4def38dc tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1769152, 'name': SuspendVM_Task, 'duration_secs': 0.909914} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1777.120639] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-cee90812-7105-4b20-9ed7-4dec4def38dc tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Suspended the VM {{(pid=62510) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1777.121713] env[62510]: DEBUG nova.compute.manager [None req-cee90812-7105-4b20-9ed7-4dec4def38dc tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1777.121713] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b88c6eb-c6c1-42e1-a9ab-0e8976052460 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.156139] env[62510]: DEBUG nova.objects.instance [None req-6e737d9a-cba6-44ed-a9a5-143e811f6e2c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lazy-loading 'pci_requests' on Instance uuid 241d842d-3dd5-4ac2-a18a-12b9c9fbd340 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1777.195285] env[62510]: DEBUG oslo_concurrency.lockutils [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquiring lock "16b5d928-94fe-4fd5-9909-775c28d7edd2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1777.195614] env[62510]: DEBUG oslo_concurrency.lockutils [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "16b5d928-94fe-4fd5-9909-775c28d7edd2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1777.195958] env[62510]: DEBUG oslo_concurrency.lockutils [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquiring lock "16b5d928-94fe-4fd5-9909-775c28d7edd2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1777.196080] env[62510]: DEBUG oslo_concurrency.lockutils [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "16b5d928-94fe-4fd5-9909-775c28d7edd2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1777.196289] env[62510]: DEBUG oslo_concurrency.lockutils [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "16b5d928-94fe-4fd5-9909-775c28d7edd2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s 
{{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1777.198835] env[62510]: INFO nova.compute.manager [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Terminating instance [ 1777.223876] env[62510]: DEBUG oslo_vmware.api [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769153, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.249270] env[62510]: DEBUG oslo_concurrency.lockutils [req-0f5361a1-e85d-4f04-9d16-ca5114c783c1 req-5d6d86a8-91d8-4203-a2ab-5f23008397ea service nova] Releasing lock "refresh_cache-8a230335-6388-45fb-a29e-9e63ddb4d5f2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1777.275460] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.956s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1777.278106] env[62510]: DEBUG oslo_concurrency.lockutils [None req-58a00e9d-c0aa-454a-b7a9-09b96b730702 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.077s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1777.278414] env[62510]: DEBUG nova.objects.instance [None req-58a00e9d-c0aa-454a-b7a9-09b96b730702 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lazy-loading 'resources' on Instance uuid 8b079310-084b-4ba0-8a82-57d64f421c11 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1777.301657] env[62510]: INFO nova.scheduler.client.report [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Deleted allocations for instance 774ea198-c933-449a-8380-2e4cc9327389 [ 1777.450567] env[62510]: DEBUG oslo_vmware.api [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Task: {'id': task-1769154, 'name': Rename_Task, 'duration_secs': 0.243307} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1777.450934] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1777.451417] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e168ce87-c1f0-4132-ba6f-5900004eca7e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.461422] env[62510]: DEBUG oslo_vmware.api [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Waiting for the task: (returnval){ [ 1777.461422] env[62510]: value = "task-1769155" [ 1777.461422] env[62510]: _type = "Task" [ 1777.461422] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1777.462416] env[62510]: DEBUG nova.network.neutron [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1777.476259] env[62510]: DEBUG oslo_vmware.api [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Task: {'id': task-1769155, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.533870] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769151, 'name': CreateVM_Task, 'duration_secs': 1.22155} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1777.534132] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1777.534969] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1777.535218] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1777.535586] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1777.535878] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b604912a-9b08-44b3-812e-ca904d9a0033 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.542555] env[62510]: DEBUG oslo_vmware.api [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1777.542555] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5253f4bb-9e02-7527-f94c-4bbcca108677" [ 1777.542555] env[62510]: _type = "Task" [ 1777.542555] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1777.552773] env[62510]: DEBUG oslo_vmware.api [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5253f4bb-9e02-7527-f94c-4bbcca108677, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.659113] env[62510]: DEBUG nova.objects.base [None req-6e737d9a-cba6-44ed-a9a5-143e811f6e2c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Object Instance<241d842d-3dd5-4ac2-a18a-12b9c9fbd340> lazy-loaded attributes: flavor,pci_requests {{(pid=62510) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1777.659364] env[62510]: DEBUG nova.network.neutron [None req-6e737d9a-cba6-44ed-a9a5-143e811f6e2c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1777.703891] env[62510]: DEBUG nova.compute.manager [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1777.704195] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1777.705827] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5235c8ab-a50d-4b5b-9fa1-a44be0454799 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.712808] env[62510]: DEBUG nova.network.neutron [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Updating instance_info_cache with network_info: [{"id": "c380698b-8ec4-4110-a0c8-89549fc49f68", "address": "fa:16:3e:ba:26:8c", "network": {"id": "eacdaedf-3a2d-4349-b143-cb9b1d95c822", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1418315988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68579c8354b4431e8ec51575cda77325", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "26472e27-9835-4f87-ab7f-ca24dfee4e83", "external-id": "nsx-vlan-transportzone-335", "segmentation_id": 335, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc380698b-8e", "ovs_interfaceid": "c380698b-8ec4-4110-a0c8-89549fc49f68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1777.723027] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 
tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1777.723027] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4a1df4be-cfb4-4187-93a1-790a68972723 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.731412] env[62510]: DEBUG oslo_vmware.api [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769153, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.770649} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1777.733416] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 2f7b02e8-f658-448f-b6e6-9bfa94c74da4/2f7b02e8-f658-448f-b6e6-9bfa94c74da4.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1777.733634] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1777.734614] env[62510]: DEBUG oslo_vmware.api [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1777.734614] env[62510]: value = "task-1769156" [ 1777.734614] env[62510]: _type = "Task" [ 1777.734614] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1777.734614] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1f6ff745-8410-4820-a34a-a691d8e2e5dd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.746893] env[62510]: DEBUG oslo_vmware.api [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1777.746893] env[62510]: value = "task-1769157" [ 1777.746893] env[62510]: _type = "Task" [ 1777.746893] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1777.750830] env[62510]: DEBUG oslo_vmware.api [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769156, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.763732] env[62510]: DEBUG oslo_vmware.api [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769157, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.803758] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6e737d9a-cba6-44ed-a9a5-143e811f6e2c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "interface-241d842d-3dd5-4ac2-a18a-12b9c9fbd340-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.153s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1777.810636] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c9bb28de-5c07-49d3-a287-5b5349eec44c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "774ea198-c933-449a-8380-2e4cc9327389" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.416s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1777.980311] env[62510]: DEBUG oslo_vmware.api [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Task: {'id': task-1769155, 'name': PowerOnVM_Task} progress is 91%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.033320] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquiring lock "9fe592c1-e23a-46d5-8952-c181709d93e7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1778.033742] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "9fe592c1-e23a-46d5-8952-c181709d93e7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1778.058544] env[62510]: DEBUG oslo_vmware.api [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5253f4bb-9e02-7527-f94c-4bbcca108677, 'name': SearchDatastore_Task, 'duration_secs': 0.018489} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1778.059154] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1778.059441] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1778.060535] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1778.060702] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1778.060901] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1778.063700] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a4a386c2-047a-465a-b271-0f56cc4d19f1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.081142] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1778.081456] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1778.084033] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dcb03bff-9ef5-482b-b7aa-6c9a67125742 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.089963] env[62510]: DEBUG oslo_vmware.api [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1778.089963] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5205c1ed-9e4e-5d63-aa69-c3263a12c974" [ 1778.089963] env[62510]: _type = "Task" [ 1778.089963] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1778.092115] env[62510]: DEBUG oslo_vmware.rw_handles [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52648f17-266f-5e35-088b-38e71b953533/disk-0.vmdk. {{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1778.095500] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c28ca7b1-a5c4-4c7e-b930-f306a76841dd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.107456] env[62510]: DEBUG oslo_vmware.api [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5205c1ed-9e4e-5d63-aa69-c3263a12c974, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.109379] env[62510]: DEBUG oslo_vmware.rw_handles [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52648f17-266f-5e35-088b-38e71b953533/disk-0.vmdk is in state: ready. {{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1778.109379] env[62510]: ERROR oslo_vmware.rw_handles [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52648f17-266f-5e35-088b-38e71b953533/disk-0.vmdk due to incomplete transfer. [ 1778.112438] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-5e79e93d-20f8-4c0c-bcd6-c8b24a580db2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.120708] env[62510]: DEBUG oslo_vmware.rw_handles [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52648f17-266f-5e35-088b-38e71b953533/disk-0.vmdk. 
{{(pid=62510) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1778.120964] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Uploaded image 16568b7b-b6fc-4a0b-a998-c12029e93388 to the Glance image server {{(pid=62510) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1778.123560] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Destroying the VM {{(pid=62510) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1778.123811] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-cae75e2e-5fbd-4ceb-9211-b7408a681ca8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.135616] env[62510]: DEBUG oslo_vmware.api [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Waiting for the task: (returnval){ [ 1778.135616] env[62510]: value = "task-1769158" [ 1778.135616] env[62510]: _type = "Task" [ 1778.135616] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1778.148884] env[62510]: DEBUG oslo_vmware.api [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769158, 'name': Destroy_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.178562] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5521f41b-14e2-4dc4-881c-515ca9416b56 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.187155] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d0eb00e-e1c5-44b7-a48f-16208e5961c1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.219461] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e95d392-9f7d-4553-9f08-416d3b4dbb49 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.222418] env[62510]: DEBUG oslo_concurrency.lockutils [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Releasing lock "refresh_cache-9373089f-dbd4-4ac9-8736-e4c929fe6fb0" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1778.222723] env[62510]: DEBUG nova.compute.manager [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Instance network_info: |[{"id": "c380698b-8ec4-4110-a0c8-89549fc49f68", "address": "fa:16:3e:ba:26:8c", "network": {"id": "eacdaedf-3a2d-4349-b143-cb9b1d95c822", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1418315988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68579c8354b4431e8ec51575cda77325", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "26472e27-9835-4f87-ab7f-ca24dfee4e83", "external-id": "nsx-vlan-transportzone-335", "segmentation_id": 335, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc380698b-8e", "ovs_interfaceid": "c380698b-8ec4-4110-a0c8-89549fc49f68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1778.223196] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ba:26:8c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '26472e27-9835-4f87-ab7f-ca24dfee4e83', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c380698b-8ec4-4110-a0c8-89549fc49f68', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1778.230976] env[62510]: DEBUG oslo.service.loopingcall [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1778.231773] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1778.232058] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-65d582d9-d09c-4adb-8561-5dae5077495b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.253310] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-252fdcf4-4b3c-42fb-8aef-22d4002265ec {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.260720] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1778.260720] env[62510]: value = "task-1769159" [ 1778.260720] env[62510]: _type = "Task" [ 1778.260720] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1778.269715] env[62510]: DEBUG nova.compute.manager [req-c519013e-e26b-4f2c-b2ab-7ffd4ddc942c req-25c09761-1322-4fc5-9ec2-e43a002f1d41 service nova] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Received event network-vif-plugged-c380698b-8ec4-4110-a0c8-89549fc49f68 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1778.269928] env[62510]: DEBUG oslo_concurrency.lockutils [req-c519013e-e26b-4f2c-b2ab-7ffd4ddc942c req-25c09761-1322-4fc5-9ec2-e43a002f1d41 service nova] Acquiring lock "9373089f-dbd4-4ac9-8736-e4c929fe6fb0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1778.270202] env[62510]: DEBUG oslo_concurrency.lockutils [req-c519013e-e26b-4f2c-b2ab-7ffd4ddc942c req-25c09761-1322-4fc5-9ec2-e43a002f1d41 service nova] Lock "9373089f-dbd4-4ac9-8736-e4c929fe6fb0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1778.270392] env[62510]: DEBUG oslo_concurrency.lockutils [req-c519013e-e26b-4f2c-b2ab-7ffd4ddc942c req-25c09761-1322-4fc5-9ec2-e43a002f1d41 service nova] Lock "9373089f-dbd4-4ac9-8736-e4c929fe6fb0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1778.270562] env[62510]: DEBUG nova.compute.manager [req-c519013e-e26b-4f2c-b2ab-7ffd4ddc942c req-25c09761-1322-4fc5-9ec2-e43a002f1d41 service nova] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] No waiting events found dispatching network-vif-plugged-c380698b-8ec4-4110-a0c8-89549fc49f68 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1778.270728] env[62510]: WARNING nova.compute.manager 
[req-c519013e-e26b-4f2c-b2ab-7ffd4ddc942c req-25c09761-1322-4fc5-9ec2-e43a002f1d41 service nova] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Received unexpected event network-vif-plugged-c380698b-8ec4-4110-a0c8-89549fc49f68 for instance with vm_state building and task_state spawning. [ 1778.270889] env[62510]: DEBUG nova.compute.manager [req-c519013e-e26b-4f2c-b2ab-7ffd4ddc942c req-25c09761-1322-4fc5-9ec2-e43a002f1d41 service nova] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Received event network-changed-c380698b-8ec4-4110-a0c8-89549fc49f68 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1778.271104] env[62510]: DEBUG nova.compute.manager [req-c519013e-e26b-4f2c-b2ab-7ffd4ddc942c req-25c09761-1322-4fc5-9ec2-e43a002f1d41 service nova] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Refreshing instance network info cache due to event network-changed-c380698b-8ec4-4110-a0c8-89549fc49f68. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1778.271351] env[62510]: DEBUG oslo_concurrency.lockutils [req-c519013e-e26b-4f2c-b2ab-7ffd4ddc942c req-25c09761-1322-4fc5-9ec2-e43a002f1d41 service nova] Acquiring lock "refresh_cache-9373089f-dbd4-4ac9-8736-e4c929fe6fb0" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1778.271722] env[62510]: DEBUG oslo_concurrency.lockutils [req-c519013e-e26b-4f2c-b2ab-7ffd4ddc942c req-25c09761-1322-4fc5-9ec2-e43a002f1d41 service nova] Acquired lock "refresh_cache-9373089f-dbd4-4ac9-8736-e4c929fe6fb0" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1778.271722] env[62510]: DEBUG nova.network.neutron [req-c519013e-e26b-4f2c-b2ab-7ffd4ddc942c req-25c09761-1322-4fc5-9ec2-e43a002f1d41 service nova] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Refreshing network info cache for port c380698b-8ec4-4110-a0c8-89549fc49f68 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1778.285255] env[62510]: DEBUG nova.compute.provider_tree [None req-58a00e9d-c0aa-454a-b7a9-09b96b730702 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1778.286924] env[62510]: DEBUG oslo_vmware.api [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769156, 'name': PowerOffVM_Task, 'duration_secs': 0.237435} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1778.291781] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1778.291781] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1778.292227] env[62510]: DEBUG oslo_vmware.api [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769157, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08528} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1778.292385] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c4dd38de-2f5a-4144-b9a1-8e3480fa38e0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.294150] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1778.298480] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ad1d20a-c81f-4752-902f-b4a70c57fe8a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.301943] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769159, 'name': CreateVM_Task} progress is 10%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.325886] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] 2f7b02e8-f658-448f-b6e6-9bfa94c74da4/2f7b02e8-f658-448f-b6e6-9bfa94c74da4.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1778.326690] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-14fa7346-ba90-42b7-8eb7-d0404dd9f03c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.348382] env[62510]: DEBUG oslo_vmware.api [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1778.348382] env[62510]: value = "task-1769161" [ 1778.348382] env[62510]: _type = "Task" [ 1778.348382] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1778.359707] env[62510]: DEBUG oslo_vmware.api [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769161, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.393778] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1778.394111] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1778.394367] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Deleting the datastore file [datastore1] 16b5d928-94fe-4fd5-9909-775c28d7edd2 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1778.394695] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9973e47c-5c2d-4704-8b70-17296c076a2f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.403818] env[62510]: DEBUG oslo_vmware.api [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1778.403818] env[62510]: value = "task-1769162" [ 1778.403818] env[62510]: _type = "Task" [ 1778.403818] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1778.414442] env[62510]: DEBUG oslo_vmware.api [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769162, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.478475] env[62510]: DEBUG oslo_vmware.api [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Task: {'id': task-1769155, 'name': PowerOnVM_Task, 'duration_secs': 0.583291} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1778.479088] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1778.479348] env[62510]: INFO nova.compute.manager [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Took 11.04 seconds to spawn the instance on the hypervisor. [ 1778.479542] env[62510]: DEBUG nova.compute.manager [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1778.480358] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-116b4605-1976-4f34-870d-b1c8a58c44be {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.537814] env[62510]: DEBUG nova.compute.manager [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1778.605407] env[62510]: DEBUG oslo_vmware.api [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5205c1ed-9e4e-5d63-aa69-c3263a12c974, 'name': SearchDatastore_Task, 'duration_secs': 0.044174} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1778.606336] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5338215-ae24-4d0d-bf82-321e3b132a31 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.612246] env[62510]: DEBUG oslo_vmware.api [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1778.612246] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]524f64f4-8ba9-e8a4-19f4-d5ab5e36cf43" [ 1778.612246] env[62510]: _type = "Task" [ 1778.612246] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1778.622447] env[62510]: DEBUG oslo_vmware.api [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]524f64f4-8ba9-e8a4-19f4-d5ab5e36cf43, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.645777] env[62510]: DEBUG oslo_vmware.api [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769158, 'name': Destroy_Task, 'duration_secs': 0.353434} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1778.646056] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Destroyed the VM [ 1778.646307] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Deleting Snapshot of the VM instance {{(pid=62510) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1778.646557] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-da8b7e5c-5c71-4d2e-9764-d78aa1ee6871 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.655490] env[62510]: DEBUG oslo_vmware.api [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Waiting for the task: (returnval){ [ 1778.655490] env[62510]: value = "task-1769163" [ 1778.655490] env[62510]: _type = "Task" [ 1778.655490] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1778.664463] env[62510]: DEBUG oslo_vmware.api [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769163, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.775769] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769159, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.792165] env[62510]: DEBUG nova.scheduler.client.report [None req-58a00e9d-c0aa-454a-b7a9-09b96b730702 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1778.859297] env[62510]: DEBUG oslo_vmware.api [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769161, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.914515] env[62510]: DEBUG oslo_vmware.api [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769162, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.347046} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1778.914786] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1778.914972] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1778.915165] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1778.915356] env[62510]: INFO nova.compute.manager [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1778.915741] env[62510]: DEBUG oslo.service.loopingcall [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1778.915979] env[62510]: DEBUG nova.compute.manager [-] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1778.916092] env[62510]: DEBUG nova.network.neutron [-] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1779.004436] env[62510]: INFO nova.compute.manager [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Took 22.48 seconds to build instance. 
[ 1779.059462] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1779.123649] env[62510]: DEBUG oslo_vmware.api [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]524f64f4-8ba9-e8a4-19f4-d5ab5e36cf43, 'name': SearchDatastore_Task, 'duration_secs': 0.022165} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1779.123956] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1779.124496] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 8a230335-6388-45fb-a29e-9e63ddb4d5f2/8a230335-6388-45fb-a29e-9e63ddb4d5f2.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1779.124699] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b74928f5-28f2-4baf-b87e-ba860ad7dc0e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.132018] env[62510]: DEBUG oslo_vmware.api [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1779.132018] env[62510]: value = "task-1769164" [ 1779.132018] env[62510]: _type = "Task" [ 1779.132018] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.143869] env[62510]: DEBUG oslo_vmware.api [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769164, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.165309] env[62510]: DEBUG oslo_vmware.api [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769163, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.257380] env[62510]: DEBUG nova.network.neutron [req-c519013e-e26b-4f2c-b2ab-7ffd4ddc942c req-25c09761-1322-4fc5-9ec2-e43a002f1d41 service nova] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Updated VIF entry in instance network info cache for port c380698b-8ec4-4110-a0c8-89549fc49f68. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1779.257826] env[62510]: DEBUG nova.network.neutron [req-c519013e-e26b-4f2c-b2ab-7ffd4ddc942c req-25c09761-1322-4fc5-9ec2-e43a002f1d41 service nova] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Updating instance_info_cache with network_info: [{"id": "c380698b-8ec4-4110-a0c8-89549fc49f68", "address": "fa:16:3e:ba:26:8c", "network": {"id": "eacdaedf-3a2d-4349-b143-cb9b1d95c822", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1418315988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68579c8354b4431e8ec51575cda77325", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "26472e27-9835-4f87-ab7f-ca24dfee4e83", "external-id": "nsx-vlan-transportzone-335", "segmentation_id": 335, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc380698b-8e", "ovs_interfaceid": "c380698b-8ec4-4110-a0c8-89549fc49f68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1779.278131] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769159, 'name': CreateVM_Task, 'duration_secs': 0.577261} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1779.278339] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1779.279221] env[62510]: DEBUG oslo_concurrency.lockutils [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1779.279446] env[62510]: DEBUG oslo_concurrency.lockutils [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1779.279804] env[62510]: DEBUG oslo_concurrency.lockutils [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1779.280312] env[62510]: INFO nova.compute.manager [None req-40a3fc4b-0d3f-4b43-a930-44c404163e80 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Resuming [ 1779.280840] env[62510]: DEBUG nova.objects.instance [None req-40a3fc4b-0d3f-4b43-a930-44c404163e80 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Lazy-loading 'flavor' on Instance uuid e7daad63-c802-4a86-bead-7e849064ed61 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1779.282268] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b37623ee-83b5-4504-8976-74ddd5707c36 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.290383] env[62510]: DEBUG oslo_vmware.api [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for the task: (returnval){ [ 1779.290383] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]521c679a-6df6-8040-4302-def7f6db3df1" [ 1779.290383] env[62510]: _type = "Task" [ 1779.290383] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.299605] env[62510]: DEBUG oslo_concurrency.lockutils [None req-58a00e9d-c0aa-454a-b7a9-09b96b730702 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.022s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1779.301789] env[62510]: DEBUG oslo_vmware.api [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]521c679a-6df6-8040-4302-def7f6db3df1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.302215] env[62510]: DEBUG oslo_concurrency.lockutils [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.032s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1779.303954] env[62510]: INFO nova.compute.claims [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1779.326068] env[62510]: INFO nova.scheduler.client.report [None req-58a00e9d-c0aa-454a-b7a9-09b96b730702 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Deleted allocations for instance 8b079310-084b-4ba0-8a82-57d64f421c11 [ 1779.364571] env[62510]: DEBUG oslo_vmware.api [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769161, 'name': ReconfigVM_Task, 'duration_secs': 0.611392} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1779.365543] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Reconfigured VM instance instance-00000052 to attach disk [datastore1] 2f7b02e8-f658-448f-b6e6-9bfa94c74da4/2f7b02e8-f658-448f-b6e6-9bfa94c74da4.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1779.366207] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f041b2ae-2b04-49cd-b6bc-ac7765a59a8e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.376104] env[62510]: DEBUG oslo_vmware.api [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1779.376104] env[62510]: value = "task-1769165" [ 1779.376104] env[62510]: _type = "Task" [ 1779.376104] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.388379] env[62510]: DEBUG oslo_vmware.api [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769165, 'name': Rename_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.506535] env[62510]: DEBUG oslo_concurrency.lockutils [None req-89e11e45-d8b0-4cac-beac-e0a4731196ff tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Lock "dabc046f-10f5-43d8-90f8-507dcb4d0144" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.994s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1779.643223] env[62510]: DEBUG oslo_vmware.api [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769164, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.668130] env[62510]: DEBUG oslo_vmware.api [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769163, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.761655] env[62510]: DEBUG oslo_concurrency.lockutils [req-c519013e-e26b-4f2c-b2ab-7ffd4ddc942c req-25c09761-1322-4fc5-9ec2-e43a002f1d41 service nova] Releasing lock "refresh_cache-9373089f-dbd4-4ac9-8736-e4c929fe6fb0" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1779.801029] env[62510]: DEBUG oslo_vmware.api [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]521c679a-6df6-8040-4302-def7f6db3df1, 'name': SearchDatastore_Task, 'duration_secs': 0.023005} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1779.801910] env[62510]: DEBUG oslo_concurrency.lockutils [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1779.801910] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1779.801910] env[62510]: DEBUG oslo_concurrency.lockutils [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1779.802186] env[62510]: DEBUG oslo_concurrency.lockutils [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1779.802227] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1779.802462] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b9df1f87-16cf-4241-857f-468fc0ddc3e8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.812865] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Created 
directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1779.813012] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1779.813750] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34378997-8b43-4835-aad4-5922d29db5d5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.820402] env[62510]: DEBUG oslo_vmware.api [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for the task: (returnval){ [ 1779.820402] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52afd371-fa31-480b-6c9b-b7d178a9a1e4" [ 1779.820402] env[62510]: _type = "Task" [ 1779.820402] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.829777] env[62510]: DEBUG oslo_vmware.api [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52afd371-fa31-480b-6c9b-b7d178a9a1e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.836044] env[62510]: DEBUG oslo_concurrency.lockutils [None req-58a00e9d-c0aa-454a-b7a9-09b96b730702 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "8b079310-084b-4ba0-8a82-57d64f421c11" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.019s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1779.886013] env[62510]: DEBUG oslo_vmware.api [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769165, 'name': Rename_Task, 'duration_secs': 0.472206} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1779.886302] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1779.886550] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-331b7316-eb1a-48c8-8d57-71d12d418963 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.893770] env[62510]: DEBUG oslo_vmware.api [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1779.893770] env[62510]: value = "task-1769166" [ 1779.893770] env[62510]: _type = "Task" [ 1779.893770] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.901761] env[62510]: DEBUG oslo_vmware.api [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769166, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.927812] env[62510]: DEBUG nova.network.neutron [-] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1780.044029] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a91574d-ea50-435f-af67-1e44829ee17c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "07966218-61f5-4449-8b28-378d892d8a38" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1780.044458] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a91574d-ea50-435f-af67-1e44829ee17c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "07966218-61f5-4449-8b28-378d892d8a38" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1780.144701] env[62510]: DEBUG oslo_vmware.api [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769164, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.619401} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1780.144983] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 8a230335-6388-45fb-a29e-9e63ddb4d5f2/8a230335-6388-45fb-a29e-9e63ddb4d5f2.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1780.145226] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1780.145482] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4fb67231-2a7e-4786-8d51-60b57fe5b0bc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.153268] env[62510]: DEBUG oslo_vmware.api [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1780.153268] env[62510]: value = "task-1769167" [ 1780.153268] env[62510]: _type = "Task" [ 1780.153268] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1780.169041] env[62510]: DEBUG oslo_vmware.api [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769163, 'name': RemoveSnapshot_Task, 'duration_secs': 1.218899} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1780.171627] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Deleted Snapshot of the VM instance {{(pid=62510) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1780.171896] env[62510]: INFO nova.compute.manager [None req-0441fa6c-b5f3-4452-b9d6-4c1fc774b73f tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Took 20.56 seconds to snapshot the instance on the hypervisor. [ 1780.174509] env[62510]: DEBUG oslo_vmware.api [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769167, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.193186] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e4837da2-d7c8-43fd-b4e9-763786bf174c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "interface-241d842d-3dd5-4ac2-a18a-12b9c9fbd340-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1780.193433] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e4837da2-d7c8-43fd-b4e9-763786bf174c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "interface-241d842d-3dd5-4ac2-a18a-12b9c9fbd340-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1780.193745] env[62510]: DEBUG nova.objects.instance [None req-e4837da2-d7c8-43fd-b4e9-763786bf174c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lazy-loading 'flavor' on Instance uuid 241d842d-3dd5-4ac2-a18a-12b9c9fbd340 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1780.332229] env[62510]: DEBUG oslo_vmware.api [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52afd371-fa31-480b-6c9b-b7d178a9a1e4, 'name': SearchDatastore_Task, 'duration_secs': 0.012032} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1780.335817] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-667bf69e-9367-40e4-a1e6-9145b6af3db5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.342334] env[62510]: DEBUG oslo_vmware.api [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for the task: (returnval){ [ 1780.342334] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52bef889-ac33-355e-fec5-202291386027" [ 1780.342334] env[62510]: _type = "Task" [ 1780.342334] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1780.353461] env[62510]: DEBUG oslo_vmware.api [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52bef889-ac33-355e-fec5-202291386027, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.404289] env[62510]: DEBUG oslo_vmware.api [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769166, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.430128] env[62510]: INFO nova.compute.manager [-] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Took 1.51 seconds to deallocate network for instance. [ 1780.546266] env[62510]: DEBUG nova.compute.manager [None req-4a91574d-ea50-435f-af67-1e44829ee17c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 07966218-61f5-4449-8b28-378d892d8a38] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1780.565514] env[62510]: DEBUG nova.compute.manager [req-5b9c40f4-8b0f-4361-83ac-170d49dbb1dc req-393a85d3-8473-4a9d-be3a-7492f9e35273 service nova] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Received event network-vif-deleted-825c17f2-947d-4f00-a176-ae94de2a927d {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1780.640363] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54771914-19fd-4b6c-b07a-8faa9e799255 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.649683] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba3a833b-0b6f-4f3b-8659-16af3fe78fbe {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.686700] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17ce258d-4ce9-48b8-ae26-9fa301509783 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.693345] env[62510]: DEBUG oslo_vmware.api [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769167, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.092182} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1780.693791] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1780.694604] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a661133a-2664-49fe-b6f7-2018510e3be2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.704039] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e2c2fc7-9347-47a9-9c60-82b4127edd98 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.726620] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] 8a230335-6388-45fb-a29e-9e63ddb4d5f2/8a230335-6388-45fb-a29e-9e63ddb4d5f2.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1780.727718] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cc822fff-c1d2-4724-a60b-724eee8f6fbd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.752224] env[62510]: DEBUG nova.compute.provider_tree [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1780.760787] env[62510]: DEBUG oslo_vmware.api [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1780.760787] env[62510]: value = "task-1769168" [ 1780.760787] env[62510]: _type = "Task" [ 1780.760787] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1780.770063] env[62510]: DEBUG oslo_vmware.api [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769168, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.792074] env[62510]: DEBUG oslo_concurrency.lockutils [None req-40a3fc4b-0d3f-4b43-a930-44c404163e80 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Acquiring lock "refresh_cache-e7daad63-c802-4a86-bead-7e849064ed61" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1780.792347] env[62510]: DEBUG oslo_concurrency.lockutils [None req-40a3fc4b-0d3f-4b43-a930-44c404163e80 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Acquired lock "refresh_cache-e7daad63-c802-4a86-bead-7e849064ed61" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1780.792582] env[62510]: DEBUG nova.network.neutron [None req-40a3fc4b-0d3f-4b43-a930-44c404163e80 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1780.809209] env[62510]: DEBUG oslo_concurrency.lockutils [None req-92b0b44e-1ae3-413c-a75c-2f7ec3c1792f tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "b7ffe11f-2f63-419b-9ad8-0a89a05d201c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1780.809432] env[62510]: DEBUG oslo_concurrency.lockutils [None req-92b0b44e-1ae3-413c-a75c-2f7ec3c1792f tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "b7ffe11f-2f63-419b-9ad8-0a89a05d201c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1780.809696] env[62510]: DEBUG oslo_concurrency.lockutils [None req-92b0b44e-1ae3-413c-a75c-2f7ec3c1792f tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "b7ffe11f-2f63-419b-9ad8-0a89a05d201c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1780.809967] env[62510]: DEBUG oslo_concurrency.lockutils [None req-92b0b44e-1ae3-413c-a75c-2f7ec3c1792f tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "b7ffe11f-2f63-419b-9ad8-0a89a05d201c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1780.810260] env[62510]: DEBUG oslo_concurrency.lockutils [None req-92b0b44e-1ae3-413c-a75c-2f7ec3c1792f tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "b7ffe11f-2f63-419b-9ad8-0a89a05d201c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1780.813280] env[62510]: INFO nova.compute.manager [None 
req-92b0b44e-1ae3-413c-a75c-2f7ec3c1792f tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Terminating instance [ 1780.839857] env[62510]: DEBUG nova.objects.instance [None req-e4837da2-d7c8-43fd-b4e9-763786bf174c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lazy-loading 'pci_requests' on Instance uuid 241d842d-3dd5-4ac2-a18a-12b9c9fbd340 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1780.855223] env[62510]: DEBUG oslo_vmware.api [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52bef889-ac33-355e-fec5-202291386027, 'name': SearchDatastore_Task, 'duration_secs': 0.019925} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1780.855613] env[62510]: DEBUG oslo_concurrency.lockutils [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1780.855961] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 9373089f-dbd4-4ac9-8736-e4c929fe6fb0/9373089f-dbd4-4ac9-8736-e4c929fe6fb0.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1780.856960] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b3a9226c-bb88-4230-9b6c-da5f79ea8b22 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.865934] env[62510]: DEBUG oslo_vmware.api [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for the task: (returnval){ [ 1780.865934] env[62510]: value = "task-1769169" [ 1780.865934] env[62510]: _type = "Task" [ 1780.865934] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1780.877916] env[62510]: DEBUG oslo_vmware.api [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769169, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.905756] env[62510]: DEBUG oslo_vmware.api [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769166, 'name': PowerOnVM_Task, 'duration_secs': 0.586876} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1780.906105] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1780.906441] env[62510]: INFO nova.compute.manager [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Took 10.78 seconds to spawn the instance on the hypervisor. [ 1780.906522] env[62510]: DEBUG nova.compute.manager [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1780.907413] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93b6ceaa-15b5-471b-8d40-480b9a55fb2f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.940783] env[62510]: DEBUG oslo_concurrency.lockutils [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1781.055329] env[62510]: DEBUG nova.compute.manager [None req-4a91574d-ea50-435f-af67-1e44829ee17c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 07966218-61f5-4449-8b28-378d892d8a38] Instance disappeared before build. {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2446}} [ 1781.255566] env[62510]: DEBUG nova.scheduler.client.report [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1781.273142] env[62510]: DEBUG oslo_vmware.api [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769168, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1781.319401] env[62510]: DEBUG nova.compute.manager [None req-92b0b44e-1ae3-413c-a75c-2f7ec3c1792f tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1781.319578] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-92b0b44e-1ae3-413c-a75c-2f7ec3c1792f tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1781.320560] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe4608ce-c0c4-4919-bb1a-98aee1a83725 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.329644] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-92b0b44e-1ae3-413c-a75c-2f7ec3c1792f tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1781.330617] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-634bb668-a425-4aa2-b850-fe149f79ad07 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.337769] env[62510]: DEBUG oslo_vmware.api [None req-92b0b44e-1ae3-413c-a75c-2f7ec3c1792f tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1781.337769] env[62510]: value = "task-1769170" [ 1781.337769] env[62510]: _type = "Task" [ 1781.337769] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1781.341933] env[62510]: DEBUG nova.objects.base [None req-e4837da2-d7c8-43fd-b4e9-763786bf174c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Object Instance<241d842d-3dd5-4ac2-a18a-12b9c9fbd340> lazy-loaded attributes: flavor,pci_requests {{(pid=62510) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1781.342158] env[62510]: DEBUG nova.network.neutron [None req-e4837da2-d7c8-43fd-b4e9-763786bf174c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1781.351245] env[62510]: DEBUG oslo_vmware.api [None req-92b0b44e-1ae3-413c-a75c-2f7ec3c1792f tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769170, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1781.377515] env[62510]: DEBUG oslo_vmware.api [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769169, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1781.427425] env[62510]: INFO nova.compute.manager [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Took 23.89 seconds to build instance. [ 1781.450088] env[62510]: DEBUG nova.policy [None req-e4837da2-d7c8-43fd-b4e9-763786bf174c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '112bb5174a71476f9aaa66e917fc135a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cca414b18f8d431786c155d359f1325d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1781.575634] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a91574d-ea50-435f-af67-1e44829ee17c tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "07966218-61f5-4449-8b28-378d892d8a38" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 1.531s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1781.762066] env[62510]: DEBUG nova.network.neutron [None req-40a3fc4b-0d3f-4b43-a930-44c404163e80 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Updating instance_info_cache with network_info: [{"id": "13fb40b1-132b-407d-b6e0-eec141ae88a8", "address": "fa:16:3e:3b:f9:de", "network": {"id": "259e31c4-74f6-4d58-9f76-c7b34d594473", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1218880601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f85ce3c02964d36a77221ba8235978c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e85cbc56-fee0-41f7-bc70-64f31775ce92", "external-id": "nsx-vlan-transportzone-793", "segmentation_id": 793, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13fb40b1-13", "ovs_interfaceid": "13fb40b1-132b-407d-b6e0-eec141ae88a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1781.764551] env[62510]: DEBUG oslo_concurrency.lockutils [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.462s {{(pid=62510) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1781.765472] env[62510]: DEBUG nova.compute.manager [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1781.769964] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.787s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1781.772271] env[62510]: INFO nova.compute.claims [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1781.790088] env[62510]: DEBUG oslo_vmware.api [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769168, 'name': ReconfigVM_Task, 'duration_secs': 0.828217} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1781.790719] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Reconfigured VM instance instance-00000053 to attach disk [datastore1] 8a230335-6388-45fb-a29e-9e63ddb4d5f2/8a230335-6388-45fb-a29e-9e63ddb4d5f2.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1781.791229] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5b85cc99-52dc-44c7-9aa8-620f4cbbd9a0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.801103] env[62510]: DEBUG oslo_vmware.api [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1781.801103] env[62510]: value = "task-1769171" [ 1781.801103] env[62510]: _type = "Task" [ 1781.801103] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1781.813590] env[62510]: DEBUG oslo_vmware.api [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769171, 'name': Rename_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1781.851717] env[62510]: DEBUG oslo_vmware.api [None req-92b0b44e-1ae3-413c-a75c-2f7ec3c1792f tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769170, 'name': PowerOffVM_Task, 'duration_secs': 0.355339} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1781.852026] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-92b0b44e-1ae3-413c-a75c-2f7ec3c1792f tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1781.852519] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-92b0b44e-1ae3-413c-a75c-2f7ec3c1792f tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1781.853214] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-77bf3ea2-9ef3-4b93-a437-6cb9e548f761 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.878186] env[62510]: DEBUG oslo_vmware.api [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769169, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1781.929845] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ebb5aad9-d76b-46ce-bf58-ca7745bb0414 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lock "2f7b02e8-f658-448f-b6e6-9bfa94c74da4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.404s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1781.984742] env[62510]: DEBUG nova.network.neutron [None req-e4837da2-d7c8-43fd-b4e9-763786bf174c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Successfully created port: 19ad8f3d-cc51-441e-862f-31fabe6277ae {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1782.054590] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-92b0b44e-1ae3-413c-a75c-2f7ec3c1792f tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1782.054841] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-92b0b44e-1ae3-413c-a75c-2f7ec3c1792f tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1782.055062] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-92b0b44e-1ae3-413c-a75c-2f7ec3c1792f 
tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Deleting the datastore file [datastore1] b7ffe11f-2f63-419b-9ad8-0a89a05d201c {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1782.055380] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1bee4306-e4a2-49cc-83e7-ec696c67c414 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.062392] env[62510]: DEBUG oslo_vmware.api [None req-92b0b44e-1ae3-413c-a75c-2f7ec3c1792f tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1782.062392] env[62510]: value = "task-1769173" [ 1782.062392] env[62510]: _type = "Task" [ 1782.062392] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1782.073648] env[62510]: DEBUG oslo_vmware.api [None req-92b0b44e-1ae3-413c-a75c-2f7ec3c1792f tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769173, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.270472] env[62510]: DEBUG oslo_concurrency.lockutils [None req-40a3fc4b-0d3f-4b43-a930-44c404163e80 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Releasing lock "refresh_cache-e7daad63-c802-4a86-bead-7e849064ed61" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1782.272525] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d2a22c4-9107-4e99-ab68-e81d5b5f0682 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.280586] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-40a3fc4b-0d3f-4b43-a930-44c404163e80 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Resuming the VM {{(pid=62510) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1782.280791] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9c03805c-97b2-4374-940e-d4afaeeb06f6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.283519] env[62510]: DEBUG nova.compute.utils [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1782.288689] env[62510]: DEBUG nova.compute.manager [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1782.288861] env[62510]: DEBUG nova.network.neutron [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1782.298422] env[62510]: DEBUG oslo_vmware.api [None req-40a3fc4b-0d3f-4b43-a930-44c404163e80 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for the task: (returnval){ [ 1782.298422] env[62510]: value = "task-1769174" [ 1782.298422] env[62510]: _type = "Task" [ 1782.298422] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1782.310224] env[62510]: DEBUG oslo_vmware.api [None req-40a3fc4b-0d3f-4b43-a930-44c404163e80 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1769174, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.316932] env[62510]: DEBUG oslo_vmware.api [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769171, 'name': Rename_Task, 'duration_secs': 0.32102} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1782.317343] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1782.317750] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-729ddbd5-454f-4749-a64d-7ab2a53947d7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.325940] env[62510]: DEBUG oslo_vmware.api [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1782.325940] env[62510]: value = "task-1769175" [ 1782.325940] env[62510]: _type = "Task" [ 1782.325940] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1782.338183] env[62510]: DEBUG oslo_vmware.api [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769175, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.347184] env[62510]: DEBUG nova.policy [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f8223ce0f63d477ba38653abf5992eb2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fc4a16c9d5d346489a9c8efec041df23', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1782.380351] env[62510]: DEBUG oslo_vmware.api [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769169, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.065273} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1782.380653] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 9373089f-dbd4-4ac9-8736-e4c929fe6fb0/9373089f-dbd4-4ac9-8736-e4c929fe6fb0.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1782.381053] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1782.382948] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f83e38a5-60c0-424f-be9d-2b34b92e8b16 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.394430] env[62510]: DEBUG oslo_vmware.api [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for the task: (returnval){ [ 1782.394430] env[62510]: value = "task-1769176" [ 1782.394430] env[62510]: _type = "Task" [ 1782.394430] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1782.405533] env[62510]: DEBUG oslo_concurrency.lockutils [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "313f7916-0737-4e44-ae2f-58301934bf06" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1782.406077] env[62510]: DEBUG oslo_concurrency.lockutils [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "313f7916-0737-4e44-ae2f-58301934bf06" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1782.412702] env[62510]: DEBUG oslo_vmware.api [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769176, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.573052] env[62510]: DEBUG oslo_vmware.api [None req-92b0b44e-1ae3-413c-a75c-2f7ec3c1792f tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769173, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.466134} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1782.573621] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-92b0b44e-1ae3-413c-a75c-2f7ec3c1792f tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1782.573848] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-92b0b44e-1ae3-413c-a75c-2f7ec3c1792f tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1782.574042] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-92b0b44e-1ae3-413c-a75c-2f7ec3c1792f tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1782.574239] env[62510]: INFO nova.compute.manager [None req-92b0b44e-1ae3-413c-a75c-2f7ec3c1792f tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Took 1.25 seconds to destroy the instance on the hypervisor. [ 1782.574756] env[62510]: DEBUG oslo.service.loopingcall [None req-92b0b44e-1ae3-413c-a75c-2f7ec3c1792f tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1782.574999] env[62510]: DEBUG nova.compute.manager [-] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1782.575110] env[62510]: DEBUG nova.network.neutron [-] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1782.789499] env[62510]: DEBUG nova.compute.manager [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1782.807787] env[62510]: DEBUG oslo_vmware.api [None req-40a3fc4b-0d3f-4b43-a930-44c404163e80 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1769174, 'name': PowerOnVM_Task} progress is 76%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.837230] env[62510]: DEBUG oslo_vmware.api [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769175, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.906415] env[62510]: DEBUG oslo_vmware.api [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769176, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.09905} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1782.907083] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1782.907527] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49213c43-b020-4045-bdab-80bffd777b9d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.927953] env[62510]: DEBUG nova.compute.manager [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1782.940206] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] 9373089f-dbd4-4ac9-8736-e4c929fe6fb0/9373089f-dbd4-4ac9-8736-e4c929fe6fb0.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1782.944404] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bc4230b2-0a40-4217-aae1-ddb5b7701a13 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.967198] env[62510]: DEBUG oslo_vmware.api [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for the task: (returnval){ [ 1782.967198] env[62510]: value = "task-1769177" [ 1782.967198] env[62510]: _type = "Task" [ 1782.967198] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1782.979683] env[62510]: DEBUG oslo_vmware.api [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769177, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.001104] env[62510]: DEBUG nova.network.neutron [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Successfully created port: 82d9ee8a-e0e1-4753-8f1f-bb68c1a6d745 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1783.221872] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-156e1786-a81a-49b9-a7be-105d982a0e9c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.231654] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0ba0173-dc4b-46f5-81c5-972f84667209 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.268677] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed6b4070-5e89-4438-8f22-b725f4a4fd6f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.278018] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acc00a42-61f6-4503-b9fc-df3b25ddb8db {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.303485] env[62510]: DEBUG nova.compute.provider_tree [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Inventory has not changed in ProviderTree for provider: 
c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1783.322388] env[62510]: DEBUG nova.network.neutron [-] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1783.324340] env[62510]: DEBUG oslo_vmware.api [None req-40a3fc4b-0d3f-4b43-a930-44c404163e80 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1769174, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.342949] env[62510]: DEBUG oslo_vmware.api [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769175, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.411587] env[62510]: DEBUG nova.compute.manager [req-b8ce9f64-bd3e-4feb-8436-b5dbd2833b34 req-40d1785f-c17a-4a2f-99c4-d7b192997ffc service nova] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Received event network-vif-deleted-300bfb1b-5d55-410a-b114-cb4043425263 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1783.460594] env[62510]: DEBUG oslo_concurrency.lockutils [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1783.478398] env[62510]: DEBUG oslo_vmware.api [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769177, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.723987] env[62510]: DEBUG nova.network.neutron [None req-e4837da2-d7c8-43fd-b4e9-763786bf174c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Successfully updated port: 19ad8f3d-cc51-441e-862f-31fabe6277ae {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1783.806696] env[62510]: DEBUG nova.compute.manager [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1783.809837] env[62510]: DEBUG nova.scheduler.client.report [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1783.822570] env[62510]: DEBUG oslo_vmware.api [None req-40a3fc4b-0d3f-4b43-a930-44c404163e80 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1769174, 'name': PowerOnVM_Task, 'duration_secs': 1.03465} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1783.823700] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-40a3fc4b-0d3f-4b43-a930-44c404163e80 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Resumed the VM {{(pid=62510) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1783.823700] env[62510]: DEBUG nova.compute.manager [None req-40a3fc4b-0d3f-4b43-a930-44c404163e80 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1783.824486] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87c6a966-fa40-4d9a-949b-fee4ea4e0388 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.831911] env[62510]: INFO nova.compute.manager [-] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Took 1.26 seconds to deallocate network for instance. 
[ 1783.843472] env[62510]: DEBUG nova.virt.hardware [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1783.843709] env[62510]: DEBUG nova.virt.hardware [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1783.843844] env[62510]: DEBUG nova.virt.hardware [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1783.844121] env[62510]: DEBUG nova.virt.hardware [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1783.844286] env[62510]: DEBUG nova.virt.hardware [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1783.844434] env[62510]: DEBUG nova.virt.hardware [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1783.844892] env[62510]: DEBUG nova.virt.hardware [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1783.845278] env[62510]: DEBUG nova.virt.hardware [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1783.845496] env[62510]: DEBUG nova.virt.hardware [None req-979c9209-399f-42b3-942e-976acd3497ec 
tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1783.845671] env[62510]: DEBUG nova.virt.hardware [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1783.845852] env[62510]: DEBUG nova.virt.hardware [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1783.847197] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a314f493-ccd6-4450-8297-c998804fa42a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.858879] env[62510]: DEBUG oslo_vmware.api [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769175, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.864146] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd1b2da9-a4bc-48e8-bb3c-4db3cac3a8d5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.978516] env[62510]: DEBUG oslo_vmware.api [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769177, 'name': ReconfigVM_Task, 'duration_secs': 0.673675} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1783.978805] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Reconfigured VM instance instance-00000054 to attach disk [datastore1] 9373089f-dbd4-4ac9-8736-e4c929fe6fb0/9373089f-dbd4-4ac9-8736-e4c929fe6fb0.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1783.979485] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-548adee8-3cfd-484d-96ec-a674977b0256 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.987969] env[62510]: DEBUG oslo_vmware.api [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for the task: (returnval){ [ 1783.987969] env[62510]: value = "task-1769178" [ 1783.987969] env[62510]: _type = "Task" [ 1783.987969] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.996091] env[62510]: DEBUG oslo_vmware.api [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769178, 'name': Rename_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.189325] env[62510]: DEBUG nova.compute.manager [req-dadc6ecc-4084-4de1-b14f-728d314081ad req-04141f99-c593-467a-99d9-4dadbe7e5571 service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Received event network-vif-plugged-19ad8f3d-cc51-441e-862f-31fabe6277ae {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1784.189441] env[62510]: DEBUG oslo_concurrency.lockutils [req-dadc6ecc-4084-4de1-b14f-728d314081ad req-04141f99-c593-467a-99d9-4dadbe7e5571 service nova] Acquiring lock "241d842d-3dd5-4ac2-a18a-12b9c9fbd340-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1784.189682] env[62510]: DEBUG oslo_concurrency.lockutils [req-dadc6ecc-4084-4de1-b14f-728d314081ad req-04141f99-c593-467a-99d9-4dadbe7e5571 service nova] Lock "241d842d-3dd5-4ac2-a18a-12b9c9fbd340-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1784.189874] env[62510]: DEBUG oslo_concurrency.lockutils [req-dadc6ecc-4084-4de1-b14f-728d314081ad req-04141f99-c593-467a-99d9-4dadbe7e5571 service nova] Lock "241d842d-3dd5-4ac2-a18a-12b9c9fbd340-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1784.190060] env[62510]: DEBUG nova.compute.manager [req-dadc6ecc-4084-4de1-b14f-728d314081ad req-04141f99-c593-467a-99d9-4dadbe7e5571 service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] No waiting events found dispatching network-vif-plugged-19ad8f3d-cc51-441e-862f-31fabe6277ae {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1784.190239] env[62510]: WARNING nova.compute.manager [req-dadc6ecc-4084-4de1-b14f-728d314081ad req-04141f99-c593-467a-99d9-4dadbe7e5571 service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Received unexpected event network-vif-plugged-19ad8f3d-cc51-441e-862f-31fabe6277ae for instance with vm_state active and task_state None. 
[ 1784.227661] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e4837da2-d7c8-43fd-b4e9-763786bf174c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "refresh_cache-241d842d-3dd5-4ac2-a18a-12b9c9fbd340" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1784.227857] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e4837da2-d7c8-43fd-b4e9-763786bf174c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquired lock "refresh_cache-241d842d-3dd5-4ac2-a18a-12b9c9fbd340" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1784.228081] env[62510]: DEBUG nova.network.neutron [None req-e4837da2-d7c8-43fd-b4e9-763786bf174c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1784.318796] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.549s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1784.320897] env[62510]: DEBUG nova.compute.manager [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1784.323379] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7b3c2c9f-3159-4808-80f1-23f38edfd7f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.624s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1784.324241] env[62510]: DEBUG nova.objects.instance [None req-7b3c2c9f-3159-4808-80f1-23f38edfd7f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Lazy-loading 'resources' on Instance uuid 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1784.343102] env[62510]: DEBUG oslo_vmware.api [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769175, 'name': PowerOnVM_Task, 'duration_secs': 1.535274} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1784.343410] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1784.343594] env[62510]: INFO nova.compute.manager [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Took 11.67 seconds to spawn the instance on the hypervisor. [ 1784.343776] env[62510]: DEBUG nova.compute.manager [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1784.344929] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26defcab-d85b-4128-acaa-b684b8cd82eb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.348504] env[62510]: DEBUG oslo_concurrency.lockutils [None req-92b0b44e-1ae3-413c-a75c-2f7ec3c1792f tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1784.499730] env[62510]: DEBUG oslo_vmware.api [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769178, 'name': Rename_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.657288] env[62510]: DEBUG nova.network.neutron [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Successfully updated port: 82d9ee8a-e0e1-4753-8f1f-bb68c1a6d745 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1784.765733] env[62510]: WARNING nova.network.neutron [None req-e4837da2-d7c8-43fd-b4e9-763786bf174c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] 940be04f-b555-4383-aaf8-63734d94a773 already exists in list: networks containing: ['940be04f-b555-4383-aaf8-63734d94a773']. 
ignoring it [ 1784.830361] env[62510]: DEBUG nova.compute.utils [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1784.835979] env[62510]: DEBUG nova.compute.manager [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1784.836136] env[62510]: DEBUG nova.network.neutron [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1784.866208] env[62510]: INFO nova.compute.manager [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Took 26.12 seconds to build instance. [ 1784.917133] env[62510]: DEBUG nova.policy [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '10c2a5dbd8e14ad79f462dabaecdd6bb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aef2814e748c4e8d8dfb96d426bf9727', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1785.004714] env[62510]: DEBUG oslo_vmware.api [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769178, 'name': Rename_Task, 'duration_secs': 0.531467} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1785.005024] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1785.005298] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-34b6e0b1-4e68-4ec1-91cc-be25a1f1f465 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.014420] env[62510]: DEBUG oslo_vmware.api [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for the task: (returnval){ [ 1785.014420] env[62510]: value = "task-1769179" [ 1785.014420] env[62510]: _type = "Task" [ 1785.014420] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1785.025068] env[62510]: DEBUG oslo_vmware.api [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769179, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1785.159797] env[62510]: DEBUG oslo_concurrency.lockutils [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Acquiring lock "refresh_cache-f40078f0-af6b-480b-96e6-4117022c87e2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1785.160030] env[62510]: DEBUG oslo_concurrency.lockutils [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Acquired lock "refresh_cache-f40078f0-af6b-480b-96e6-4117022c87e2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1785.160350] env[62510]: DEBUG nova.network.neutron [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1785.180814] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31941cf5-da79-4fb7-a358-0584ea4e949b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.191928] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d75c625-35fb-43ba-b53a-f94811f73a92 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.225472] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30c2801d-c26e-4593-94ed-8adf197f7333 {{(pid=62510) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.236210] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83ced678-4ec3-4693-be79-650d4dfd4150 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.252578] env[62510]: DEBUG nova.compute.provider_tree [None req-7b3c2c9f-3159-4808-80f1-23f38edfd7f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1785.331540] env[62510]: DEBUG nova.network.neutron [None req-e4837da2-d7c8-43fd-b4e9-763786bf174c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Updating instance_info_cache with network_info: [{"id": "e0d0d69b-8e64-4722-b7d5-837e5c7482bc", "address": "fa:16:3e:2b:3e:7e", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0d0d69b-8e", "ovs_interfaceid": "e0d0d69b-8e64-4722-b7d5-837e5c7482bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "19ad8f3d-cc51-441e-862f-31fabe6277ae", "address": "fa:16:3e:69:fc:ef", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19ad8f3d-cc", "ovs_interfaceid": "19ad8f3d-cc51-441e-862f-31fabe6277ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1785.337058] env[62510]: DEBUG nova.compute.manager [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1785.368378] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2697e290-f15b-41e3-a927-5985ef675a69 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lock "8a230335-6388-45fb-a29e-9e63ddb4d5f2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.626s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1785.440467] env[62510]: DEBUG nova.network.neutron [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Successfully created port: 89aed103-9d06-4efa-9cf0-72267b06a41d {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1785.527178] env[62510]: DEBUG oslo_vmware.api [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769179, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1785.725667] env[62510]: DEBUG nova.network.neutron [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Instance cache missing network info. 
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1785.757306] env[62510]: DEBUG nova.scheduler.client.report [None req-7b3c2c9f-3159-4808-80f1-23f38edfd7f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1785.833645] env[62510]: DEBUG nova.compute.manager [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1785.834170] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e4837da2-d7c8-43fd-b4e9-763786bf174c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Releasing lock "refresh_cache-241d842d-3dd5-4ac2-a18a-12b9c9fbd340" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1785.834754] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e4837da2-d7c8-43fd-b4e9-763786bf174c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "241d842d-3dd5-4ac2-a18a-12b9c9fbd340" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1785.834915] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e4837da2-d7c8-43fd-b4e9-763786bf174c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquired lock "241d842d-3dd5-4ac2-a18a-12b9c9fbd340" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1785.835752] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b0d894e-7171-430d-b2a0-b8b8314c7b4a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.843884] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88f23d24-f11e-4489-a6da-7fb60fd4ddc7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.873942] env[62510]: DEBUG nova.virt.hardware [None req-e4837da2-d7c8-43fd-b4e9-763786bf174c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1785.874210] env[62510]: DEBUG nova.virt.hardware [None req-e4837da2-d7c8-43fd-b4e9-763786bf174c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1785.874475] env[62510]: DEBUG nova.virt.hardware [None req-e4837da2-d7c8-43fd-b4e9-763786bf174c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1785.874550] env[62510]: DEBUG nova.virt.hardware [None req-e4837da2-d7c8-43fd-b4e9-763786bf174c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1785.874697] env[62510]: DEBUG nova.virt.hardware [None req-e4837da2-d7c8-43fd-b4e9-763786bf174c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1785.874833] env[62510]: DEBUG nova.virt.hardware [None req-e4837da2-d7c8-43fd-b4e9-763786bf174c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1785.875048] env[62510]: DEBUG nova.virt.hardware [None req-e4837da2-d7c8-43fd-b4e9-763786bf174c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1785.875217] env[62510]: DEBUG nova.virt.hardware [None req-e4837da2-d7c8-43fd-b4e9-763786bf174c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1785.875386] env[62510]: DEBUG nova.virt.hardware [None req-e4837da2-d7c8-43fd-b4e9-763786bf174c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1785.875553] env[62510]: DEBUG nova.virt.hardware [None req-e4837da2-d7c8-43fd-b4e9-763786bf174c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1785.875727] env[62510]: DEBUG nova.virt.hardware [None req-e4837da2-d7c8-43fd-b4e9-763786bf174c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Sorted desired 
topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1785.883263] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e4837da2-d7c8-43fd-b4e9-763786bf174c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Reconfiguring VM to attach interface {{(pid=62510) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1785.883987] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-52e31221-e44b-448b-9757-d058ef26cd58 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.909016] env[62510]: DEBUG oslo_vmware.api [None req-e4837da2-d7c8-43fd-b4e9-763786bf174c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1785.909016] env[62510]: value = "task-1769180" [ 1785.909016] env[62510]: _type = "Task" [ 1785.909016] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1785.922991] env[62510]: DEBUG oslo_vmware.api [None req-e4837da2-d7c8-43fd-b4e9-763786bf174c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769180, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1785.978696] env[62510]: DEBUG nova.network.neutron [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Updating instance_info_cache with network_info: [{"id": "82d9ee8a-e0e1-4753-8f1f-bb68c1a6d745", "address": "fa:16:3e:55:52:92", "network": {"id": "37039935-bfed-4317-b0bb-aa8df5f89dbd", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1046853355-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc4a16c9d5d346489a9c8efec041df23", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e66c4ebe-f808-4b34-bdb5-6c45edb1736f", "external-id": "cl2-zone-719", "segmentation_id": 719, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82d9ee8a-e0", "ovs_interfaceid": "82d9ee8a-e0e1-4753-8f1f-bb68c1a6d745", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1786.026862] env[62510]: DEBUG oslo_vmware.api [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769179, 'name': PowerOnVM_Task, 'duration_secs': 0.751072} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1786.027334] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1786.027585] env[62510]: INFO nova.compute.manager [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Took 10.83 seconds to spawn the instance on the hypervisor. [ 1786.028321] env[62510]: DEBUG nova.compute.manager [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1786.028644] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ece4afe4-7fbf-4df3-bad3-d474d329b069 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.262396] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7b3c2c9f-3159-4808-80f1-23f38edfd7f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.939s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1786.265734] env[62510]: DEBUG oslo_concurrency.lockutils [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.335s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1786.267961] env[62510]: INFO nova.compute.claims [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 31772dc9-4f04-42df-9e3b-3200cc72c977] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1786.283492] env[62510]: INFO nova.scheduler.client.report [None req-7b3c2c9f-3159-4808-80f1-23f38edfd7f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Deleted allocations for instance 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a [ 1786.356474] env[62510]: DEBUG nova.compute.manager [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1786.384187] env[62510]: DEBUG nova.virt.hardware [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1786.384524] env[62510]: DEBUG nova.virt.hardware [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1786.384674] env[62510]: DEBUG nova.virt.hardware [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1786.384959] env[62510]: DEBUG nova.virt.hardware [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1786.385198] env[62510]: DEBUG nova.virt.hardware [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1786.385403] env[62510]: DEBUG nova.virt.hardware [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1786.385679] env[62510]: DEBUG nova.virt.hardware [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1786.385944] env[62510]: DEBUG nova.virt.hardware [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1786.386206] env[62510]: DEBUG nova.virt.hardware [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1786.386430] env[62510]: DEBUG nova.virt.hardware [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1786.386623] env[62510]: DEBUG nova.virt.hardware [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1786.387898] env[62510]: INFO nova.compute.manager [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] instance snapshotting [ 1786.390883] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c07ef3a-4ec2-49b4-88ca-44bcee042470 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.395188] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab2bf08e-e059-4de7-8a86-f8ec854b333c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.420283] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beabb82d-a827-40fd-bbb6-26cab0851ae4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.428808] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e894b4d-327a-455d-97fe-111bcb6a2108 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.453505] env[62510]: DEBUG nova.compute.manager [req-80b1d744-aea8-437c-87cb-4359a80cd57b req-228639ef-a3be-4313-899b-0801ac188ab8 service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Received event network-changed-19ad8f3d-cc51-441e-862f-31fabe6277ae {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1786.455038] env[62510]: DEBUG nova.compute.manager [req-80b1d744-aea8-437c-87cb-4359a80cd57b req-228639ef-a3be-4313-899b-0801ac188ab8 service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Refreshing instance network info cache due to event network-changed-19ad8f3d-cc51-441e-862f-31fabe6277ae. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1786.455038] env[62510]: DEBUG oslo_concurrency.lockutils [req-80b1d744-aea8-437c-87cb-4359a80cd57b req-228639ef-a3be-4313-899b-0801ac188ab8 service nova] Acquiring lock "refresh_cache-241d842d-3dd5-4ac2-a18a-12b9c9fbd340" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1786.455038] env[62510]: DEBUG oslo_concurrency.lockutils [req-80b1d744-aea8-437c-87cb-4359a80cd57b req-228639ef-a3be-4313-899b-0801ac188ab8 service nova] Acquired lock "refresh_cache-241d842d-3dd5-4ac2-a18a-12b9c9fbd340" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1786.455038] env[62510]: DEBUG nova.network.neutron [req-80b1d744-aea8-437c-87cb-4359a80cd57b req-228639ef-a3be-4313-899b-0801ac188ab8 service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Refreshing network info cache for port 19ad8f3d-cc51-441e-862f-31fabe6277ae {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1786.455939] env[62510]: DEBUG oslo_vmware.api [None req-e4837da2-d7c8-43fd-b4e9-763786bf174c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769180, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.481083] env[62510]: DEBUG oslo_concurrency.lockutils [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Releasing lock "refresh_cache-f40078f0-af6b-480b-96e6-4117022c87e2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1786.481472] env[62510]: DEBUG nova.compute.manager [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Instance network_info: |[{"id": "82d9ee8a-e0e1-4753-8f1f-bb68c1a6d745", "address": "fa:16:3e:55:52:92", "network": {"id": "37039935-bfed-4317-b0bb-aa8df5f89dbd", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1046853355-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc4a16c9d5d346489a9c8efec041df23", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e66c4ebe-f808-4b34-bdb5-6c45edb1736f", "external-id": "cl2-zone-719", "segmentation_id": 719, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82d9ee8a-e0", "ovs_interfaceid": "82d9ee8a-e0e1-4753-8f1f-bb68c1a6d745", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1786.482153] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 
tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:55:52:92', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e66c4ebe-f808-4b34-bdb5-6c45edb1736f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '82d9ee8a-e0e1-4753-8f1f-bb68c1a6d745', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1786.490270] env[62510]: DEBUG oslo.service.loopingcall [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1786.491051] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1786.491313] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-68755dbe-6a12-411a-9fd1-f6e7ba97f6c1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.514765] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1786.514765] env[62510]: value = "task-1769181" [ 1786.514765] env[62510]: _type = "Task" [ 1786.514765] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1786.524830] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769181, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.556455] env[62510]: INFO nova.compute.manager [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Took 26.86 seconds to build instance. [ 1786.793697] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7b3c2c9f-3159-4808-80f1-23f38edfd7f8 tempest-ServersWithSpecificFlavorTestJSON-1514187069 tempest-ServersWithSpecificFlavorTestJSON-1514187069-project-member] Lock "0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.408s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1786.935884] env[62510]: DEBUG oslo_vmware.api [None req-e4837da2-d7c8-43fd-b4e9-763786bf174c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769180, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.958279] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Creating Snapshot of the VM instance {{(pid=62510) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1786.960828] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-46ba45be-b69e-4d9c-ae74-78ea7b9dc6f6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.970138] env[62510]: DEBUG oslo_vmware.api [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Waiting for the task: (returnval){ [ 1786.970138] env[62510]: value = "task-1769182" [ 1786.970138] env[62510]: _type = "Task" [ 1786.970138] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1786.979135] env[62510]: DEBUG oslo_vmware.api [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769182, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.024984] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769181, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.047590] env[62510]: INFO nova.compute.manager [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Rescuing [ 1787.047869] env[62510]: DEBUG oslo_concurrency.lockutils [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquiring lock "refresh_cache-8a230335-6388-45fb-a29e-9e63ddb4d5f2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1787.048058] env[62510]: DEBUG oslo_concurrency.lockutils [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquired lock "refresh_cache-8a230335-6388-45fb-a29e-9e63ddb4d5f2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1787.048238] env[62510]: DEBUG nova.network.neutron [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1787.059491] env[62510]: DEBUG oslo_concurrency.lockutils [None req-47427302-fe09-4199-9e4a-b00db5c72bb0 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Lock "9373089f-dbd4-4ac9-8736-e4c929fe6fb0" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.368s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1787.406288] env[62510]: DEBUG nova.network.neutron [req-80b1d744-aea8-437c-87cb-4359a80cd57b req-228639ef-a3be-4313-899b-0801ac188ab8 service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Updated VIF entry in instance network info cache for port 19ad8f3d-cc51-441e-862f-31fabe6277ae. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1787.407017] env[62510]: DEBUG nova.network.neutron [req-80b1d744-aea8-437c-87cb-4359a80cd57b req-228639ef-a3be-4313-899b-0801ac188ab8 service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Updating instance_info_cache with network_info: [{"id": "e0d0d69b-8e64-4722-b7d5-837e5c7482bc", "address": "fa:16:3e:2b:3e:7e", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0d0d69b-8e", "ovs_interfaceid": "e0d0d69b-8e64-4722-b7d5-837e5c7482bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "19ad8f3d-cc51-441e-862f-31fabe6277ae", "address": "fa:16:3e:69:fc:ef", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19ad8f3d-cc", "ovs_interfaceid": "19ad8f3d-cc51-441e-862f-31fabe6277ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1787.440414] env[62510]: DEBUG oslo_vmware.api [None req-e4837da2-d7c8-43fd-b4e9-763786bf174c tempest-AttachInterfacesTestJSON-679866724 
tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769180, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.453245] env[62510]: DEBUG nova.network.neutron [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Successfully updated port: 89aed103-9d06-4efa-9cf0-72267b06a41d {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1787.482540] env[62510]: DEBUG oslo_vmware.api [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769182, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.544663] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769181, 'name': CreateVM_Task, 'duration_secs': 0.525395} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1787.547193] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1787.548062] env[62510]: DEBUG oslo_concurrency.lockutils [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1787.548242] env[62510]: DEBUG oslo_concurrency.lockutils [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1787.548587] env[62510]: DEBUG oslo_concurrency.lockutils [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1787.548817] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c97ed936-3e45-494f-b02f-0aeb885976fd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.556481] env[62510]: DEBUG oslo_vmware.api [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Waiting for the task: (returnval){ [ 1787.556481] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]520219ed-e0de-2f5b-966e-d44f830e940d" [ 1787.556481] env[62510]: _type = "Task" [ 1787.556481] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.570180] env[62510]: DEBUG oslo_vmware.api [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]520219ed-e0de-2f5b-966e-d44f830e940d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.629220] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b669263-aa54-4a7f-8813-b3932b34401c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.638963] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4ecec1e-e3e3-4cb6-9fb1-508c222c9996 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.675535] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00ecae45-01d6-4c69-8d7f-62e366b52bbb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.684207] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08b2d430-e614-45a3-b5f5-8097d91da48e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.699309] env[62510]: DEBUG nova.compute.provider_tree [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1787.852405] env[62510]: DEBUG nova.network.neutron [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Updating instance_info_cache with network_info: [{"id": "a0b771a0-7aa2-49f0-9945-9956c4260b99", "address": "fa:16:3e:f1:7e:dd", "network": {"id": "3b8d6085-89b4-4ce1-b2d3-a23177f0eb79", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-951886226-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de0d125bba6242d3b9614402098efc1f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0b771a0-7a", "ovs_interfaceid": "a0b771a0-7aa2-49f0-9945-9956c4260b99", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1787.912078] env[62510]: DEBUG oslo_concurrency.lockutils [req-80b1d744-aea8-437c-87cb-4359a80cd57b req-228639ef-a3be-4313-899b-0801ac188ab8 service nova] Releasing lock "refresh_cache-241d842d-3dd5-4ac2-a18a-12b9c9fbd340" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1787.912078] env[62510]: DEBUG nova.compute.manager [req-80b1d744-aea8-437c-87cb-4359a80cd57b req-228639ef-a3be-4313-899b-0801ac188ab8 service nova] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Received event network-vif-plugged-82d9ee8a-e0e1-4753-8f1f-bb68c1a6d745 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1787.912078] env[62510]: DEBUG oslo_concurrency.lockutils [req-80b1d744-aea8-437c-87cb-4359a80cd57b req-228639ef-a3be-4313-899b-0801ac188ab8 service nova] Acquiring lock "f40078f0-af6b-480b-96e6-4117022c87e2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1787.912337] env[62510]: DEBUG oslo_concurrency.lockutils [req-80b1d744-aea8-437c-87cb-4359a80cd57b req-228639ef-a3be-4313-899b-0801ac188ab8 service nova] Lock "f40078f0-af6b-480b-96e6-4117022c87e2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1787.912433] env[62510]: DEBUG oslo_concurrency.lockutils [req-80b1d744-aea8-437c-87cb-4359a80cd57b req-228639ef-a3be-4313-899b-0801ac188ab8 service nova] Lock "f40078f0-af6b-480b-96e6-4117022c87e2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1787.912666] env[62510]: DEBUG nova.compute.manager [req-80b1d744-aea8-437c-87cb-4359a80cd57b req-228639ef-a3be-4313-899b-0801ac188ab8 service nova] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] No waiting events found dispatching network-vif-plugged-82d9ee8a-e0e1-4753-8f1f-bb68c1a6d745 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1787.912843] env[62510]: WARNING nova.compute.manager [req-80b1d744-aea8-437c-87cb-4359a80cd57b req-228639ef-a3be-4313-899b-0801ac188ab8 service nova] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Received unexpected event network-vif-plugged-82d9ee8a-e0e1-4753-8f1f-bb68c1a6d745 for instance with vm_state building and task_state spawning. [ 1787.913020] env[62510]: DEBUG nova.compute.manager [req-80b1d744-aea8-437c-87cb-4359a80cd57b req-228639ef-a3be-4313-899b-0801ac188ab8 service nova] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Received event network-changed-82d9ee8a-e0e1-4753-8f1f-bb68c1a6d745 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1787.913201] env[62510]: DEBUG nova.compute.manager [req-80b1d744-aea8-437c-87cb-4359a80cd57b req-228639ef-a3be-4313-899b-0801ac188ab8 service nova] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Refreshing instance network info cache due to event network-changed-82d9ee8a-e0e1-4753-8f1f-bb68c1a6d745. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1787.913395] env[62510]: DEBUG oslo_concurrency.lockutils [req-80b1d744-aea8-437c-87cb-4359a80cd57b req-228639ef-a3be-4313-899b-0801ac188ab8 service nova] Acquiring lock "refresh_cache-f40078f0-af6b-480b-96e6-4117022c87e2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1787.913523] env[62510]: DEBUG oslo_concurrency.lockutils [req-80b1d744-aea8-437c-87cb-4359a80cd57b req-228639ef-a3be-4313-899b-0801ac188ab8 service nova] Acquired lock "refresh_cache-f40078f0-af6b-480b-96e6-4117022c87e2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1787.913691] env[62510]: DEBUG nova.network.neutron [req-80b1d744-aea8-437c-87cb-4359a80cd57b req-228639ef-a3be-4313-899b-0801ac188ab8 service nova] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Refreshing network info cache for port 82d9ee8a-e0e1-4753-8f1f-bb68c1a6d745 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1787.936674] env[62510]: DEBUG oslo_vmware.api [None req-e4837da2-d7c8-43fd-b4e9-763786bf174c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769180, 'name': ReconfigVM_Task, 'duration_secs': 1.762471} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1787.937088] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e4837da2-d7c8-43fd-b4e9-763786bf174c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Releasing lock "241d842d-3dd5-4ac2-a18a-12b9c9fbd340" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1787.937335] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e4837da2-d7c8-43fd-b4e9-763786bf174c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Reconfigured VM to attach interface {{(pid=62510) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1787.956250] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Acquiring lock "refresh_cache-1dc9e3b6-5e75-49b4-aef0-01200fb9be47" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1787.956392] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Acquired lock "refresh_cache-1dc9e3b6-5e75-49b4-aef0-01200fb9be47" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1787.956545] env[62510]: DEBUG nova.network.neutron [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1787.982728] env[62510]: DEBUG oslo_vmware.api [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 
tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769182, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.070466] env[62510]: DEBUG oslo_vmware.api [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]520219ed-e0de-2f5b-966e-d44f830e940d, 'name': SearchDatastore_Task, 'duration_secs': 0.013032} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1788.070834] env[62510]: DEBUG oslo_concurrency.lockutils [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1788.071287] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1788.071399] env[62510]: DEBUG oslo_concurrency.lockutils [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1788.071531] env[62510]: DEBUG oslo_concurrency.lockutils [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1788.071732] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1788.072086] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b8920591-6f6f-4787-b552-9d8143950cf3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.082843] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1788.083084] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-979c9209-399f-42b3-942e-976acd3497ec 
tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1788.083960] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0e10fae-94d1-4f28-aa84-ff55001e3327 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.091294] env[62510]: DEBUG oslo_vmware.api [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Waiting for the task: (returnval){ [ 1788.091294] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52927bb0-1ed8-3039-1ddb-59243a0abf9b" [ 1788.091294] env[62510]: _type = "Task" [ 1788.091294] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.101267] env[62510]: DEBUG oslo_vmware.api [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52927bb0-1ed8-3039-1ddb-59243a0abf9b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.204545] env[62510]: DEBUG nova.scheduler.client.report [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1788.355681] env[62510]: DEBUG oslo_concurrency.lockutils [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Releasing lock "refresh_cache-8a230335-6388-45fb-a29e-9e63ddb4d5f2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1788.442590] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e4837da2-d7c8-43fd-b4e9-763786bf174c tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "interface-241d842d-3dd5-4ac2-a18a-12b9c9fbd340-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 8.249s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1788.484904] env[62510]: DEBUG oslo_vmware.api [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769182, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.529013] env[62510]: DEBUG nova.network.neutron [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1788.603983] env[62510]: DEBUG oslo_vmware.api [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52927bb0-1ed8-3039-1ddb-59243a0abf9b, 'name': SearchDatastore_Task, 'duration_secs': 0.012139} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1788.606982] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7862447-745f-4cef-8147-2729643e9ec0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.613999] env[62510]: DEBUG oslo_vmware.api [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Waiting for the task: (returnval){ [ 1788.613999] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52140311-c8c8-f441-49ef-c89770f2e9ea" [ 1788.613999] env[62510]: _type = "Task" [ 1788.613999] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.625722] env[62510]: DEBUG oslo_vmware.api [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52140311-c8c8-f441-49ef-c89770f2e9ea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.709756] env[62510]: DEBUG oslo_concurrency.lockutils [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.445s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1788.710315] env[62510]: DEBUG nova.compute.manager [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 31772dc9-4f04-42df-9e3b-3200cc72c977] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1788.713138] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.492s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1788.714589] env[62510]: INFO nova.compute.claims [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1788.841561] env[62510]: DEBUG nova.network.neutron [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Updating instance_info_cache with network_info: [{"id": "89aed103-9d06-4efa-9cf0-72267b06a41d", "address": "fa:16:3e:51:a6:e8", "network": {"id": "26c6d309-4ec8-47ff-ad17-f7ec54141444", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1250434407-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aef2814e748c4e8d8dfb96d426bf9727", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da0e5087-d65b-416f-90fe-beaa9c534ad3", "external-id": "nsx-vlan-transportzone-522", "segmentation_id": 522, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89aed103-9d", "ovs_interfaceid": "89aed103-9d06-4efa-9cf0-72267b06a41d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1788.877048] env[62510]: DEBUG nova.network.neutron [req-80b1d744-aea8-437c-87cb-4359a80cd57b req-228639ef-a3be-4313-899b-0801ac188ab8 service nova] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Updated VIF entry in instance network info cache for port 82d9ee8a-e0e1-4753-8f1f-bb68c1a6d745. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1788.877481] env[62510]: DEBUG nova.network.neutron [req-80b1d744-aea8-437c-87cb-4359a80cd57b req-228639ef-a3be-4313-899b-0801ac188ab8 service nova] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Updating instance_info_cache with network_info: [{"id": "82d9ee8a-e0e1-4753-8f1f-bb68c1a6d745", "address": "fa:16:3e:55:52:92", "network": {"id": "37039935-bfed-4317-b0bb-aa8df5f89dbd", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1046853355-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc4a16c9d5d346489a9c8efec041df23", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e66c4ebe-f808-4b34-bdb5-6c45edb1736f", "external-id": "cl2-zone-719", "segmentation_id": 719, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82d9ee8a-e0", "ovs_interfaceid": "82d9ee8a-e0e1-4753-8f1f-bb68c1a6d745", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1788.982941] env[62510]: DEBUG oslo_vmware.api [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769182, 'name': CreateSnapshot_Task, 'duration_secs': 1.541426} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1788.984664] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Created Snapshot of the VM instance {{(pid=62510) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1788.984664] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9dab42f-95f0-4a2a-b02b-0b7ff56afd34 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.124882] env[62510]: DEBUG oslo_vmware.api [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52140311-c8c8-f441-49ef-c89770f2e9ea, 'name': SearchDatastore_Task, 'duration_secs': 0.055915} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1789.125212] env[62510]: DEBUG oslo_concurrency.lockutils [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1789.125475] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] f40078f0-af6b-480b-96e6-4117022c87e2/f40078f0-af6b-480b-96e6-4117022c87e2.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1789.125738] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cdc4a243-b8f9-43ef-b830-bc5d043333e9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.132540] env[62510]: DEBUG oslo_vmware.api [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Waiting for the task: (returnval){ [ 1789.132540] env[62510]: value = "task-1769183" [ 1789.132540] env[62510]: _type = "Task" [ 1789.132540] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.140923] env[62510]: DEBUG oslo_vmware.api [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': task-1769183, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.188045] env[62510]: DEBUG nova.compute.manager [req-43c96342-0894-4138-b64e-4e8ab0c0bacb req-dbaae4af-0680-4b4a-84b1-fa4e4a5fd81f service nova] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Received event network-vif-plugged-89aed103-9d06-4efa-9cf0-72267b06a41d {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1789.188367] env[62510]: DEBUG oslo_concurrency.lockutils [req-43c96342-0894-4138-b64e-4e8ab0c0bacb req-dbaae4af-0680-4b4a-84b1-fa4e4a5fd81f service nova] Acquiring lock "1dc9e3b6-5e75-49b4-aef0-01200fb9be47-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.188648] env[62510]: DEBUG oslo_concurrency.lockutils [req-43c96342-0894-4138-b64e-4e8ab0c0bacb req-dbaae4af-0680-4b4a-84b1-fa4e4a5fd81f service nova] Lock "1dc9e3b6-5e75-49b4-aef0-01200fb9be47-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1789.188911] env[62510]: DEBUG oslo_concurrency.lockutils [req-43c96342-0894-4138-b64e-4e8ab0c0bacb req-dbaae4af-0680-4b4a-84b1-fa4e4a5fd81f service nova] Lock "1dc9e3b6-5e75-49b4-aef0-01200fb9be47-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1789.189167] env[62510]: DEBUG nova.compute.manager [req-43c96342-0894-4138-b64e-4e8ab0c0bacb req-dbaae4af-0680-4b4a-84b1-fa4e4a5fd81f service nova] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] No waiting events found dispatching network-vif-plugged-89aed103-9d06-4efa-9cf0-72267b06a41d {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1789.189407] env[62510]: WARNING nova.compute.manager [req-43c96342-0894-4138-b64e-4e8ab0c0bacb req-dbaae4af-0680-4b4a-84b1-fa4e4a5fd81f service nova] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Received unexpected event network-vif-plugged-89aed103-9d06-4efa-9cf0-72267b06a41d for instance with vm_state building and task_state spawning. [ 1789.189641] env[62510]: DEBUG nova.compute.manager [req-43c96342-0894-4138-b64e-4e8ab0c0bacb req-dbaae4af-0680-4b4a-84b1-fa4e4a5fd81f service nova] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Received event network-changed-89aed103-9d06-4efa-9cf0-72267b06a41d {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1789.189870] env[62510]: DEBUG nova.compute.manager [req-43c96342-0894-4138-b64e-4e8ab0c0bacb req-dbaae4af-0680-4b4a-84b1-fa4e4a5fd81f service nova] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Refreshing instance network info cache due to event network-changed-89aed103-9d06-4efa-9cf0-72267b06a41d. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1789.190120] env[62510]: DEBUG oslo_concurrency.lockutils [req-43c96342-0894-4138-b64e-4e8ab0c0bacb req-dbaae4af-0680-4b4a-84b1-fa4e4a5fd81f service nova] Acquiring lock "refresh_cache-1dc9e3b6-5e75-49b4-aef0-01200fb9be47" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1789.219268] env[62510]: DEBUG nova.compute.utils [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1789.222526] env[62510]: DEBUG nova.compute.manager [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 31772dc9-4f04-42df-9e3b-3200cc72c977] Not allocating networking since 'none' was specified. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1789.344245] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Releasing lock "refresh_cache-1dc9e3b6-5e75-49b4-aef0-01200fb9be47" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1789.344644] env[62510]: DEBUG nova.compute.manager [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Instance network_info: |[{"id": "89aed103-9d06-4efa-9cf0-72267b06a41d", "address": "fa:16:3e:51:a6:e8", "network": {"id": "26c6d309-4ec8-47ff-ad17-f7ec54141444", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1250434407-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aef2814e748c4e8d8dfb96d426bf9727", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da0e5087-d65b-416f-90fe-beaa9c534ad3", "external-id": "nsx-vlan-transportzone-522", "segmentation_id": 522, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89aed103-9d", "ovs_interfaceid": "89aed103-9d06-4efa-9cf0-72267b06a41d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1789.345025] env[62510]: DEBUG oslo_concurrency.lockutils [req-43c96342-0894-4138-b64e-4e8ab0c0bacb req-dbaae4af-0680-4b4a-84b1-fa4e4a5fd81f service nova] Acquired lock "refresh_cache-1dc9e3b6-5e75-49b4-aef0-01200fb9be47" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1789.345236] env[62510]: DEBUG nova.network.neutron [req-43c96342-0894-4138-b64e-4e8ab0c0bacb req-dbaae4af-0680-4b4a-84b1-fa4e4a5fd81f service nova] [instance: 
1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Refreshing network info cache for port 89aed103-9d06-4efa-9cf0-72267b06a41d {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1789.346620] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:51:a6:e8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'da0e5087-d65b-416f-90fe-beaa9c534ad3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '89aed103-9d06-4efa-9cf0-72267b06a41d', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1789.358531] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Creating folder: Project (aef2814e748c4e8d8dfb96d426bf9727). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1789.362935] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-61b418e5-3e0c-4ed8-82a5-4c178e32ee0a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.379028] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Created folder: Project (aef2814e748c4e8d8dfb96d426bf9727) in parent group-v367197. [ 1789.379376] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Creating folder: Instances. Parent ref: group-v367422. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1789.380242] env[62510]: DEBUG oslo_concurrency.lockutils [req-80b1d744-aea8-437c-87cb-4359a80cd57b req-228639ef-a3be-4313-899b-0801ac188ab8 service nova] Releasing lock "refresh_cache-f40078f0-af6b-480b-96e6-4117022c87e2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1789.380746] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1dd53dfc-ff74-4aa6-8d47-b9d806aca179 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.397389] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Created folder: Instances in parent group-v367422. [ 1789.397919] env[62510]: DEBUG oslo.service.loopingcall [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1789.398122] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1789.398600] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fca99b54-ef98-47d8-aaad-b002f90a77e7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.438666] env[62510]: DEBUG nova.compute.manager [None req-fab329b8-2e25-42a4-9437-e077c5825a73 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1789.440450] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89d38a3f-b023-4a2e-8d6e-6c152e1456e7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.458203] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1789.458203] env[62510]: value = "task-1769186" [ 1789.458203] env[62510]: _type = "Task" [ 1789.458203] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.467791] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769186, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.490590] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1789.490590] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1789.504737] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Creating linked-clone VM from snapshot {{(pid=62510) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1789.506050] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-98a790a4-72a3-4d53-8295-c7f09fb3fe7a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.521105] env[62510]: DEBUG oslo_vmware.api [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Waiting for the task: (returnval){ [ 1789.521105] env[62510]: value = "task-1769187" [ 1789.521105] env[62510]: _type = "Task" [ 1789.521105] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.544804] env[62510]: DEBUG oslo_vmware.api [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769187, 'name': CloneVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.646367] env[62510]: DEBUG oslo_vmware.api [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': task-1769183, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.724210] env[62510]: DEBUG nova.compute.manager [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 31772dc9-4f04-42df-9e3b-3200cc72c977] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1789.839157] env[62510]: DEBUG nova.network.neutron [req-43c96342-0894-4138-b64e-4e8ab0c0bacb req-dbaae4af-0680-4b4a-84b1-fa4e4a5fd81f service nova] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Updated VIF entry in instance network info cache for port 89aed103-9d06-4efa-9cf0-72267b06a41d. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1789.839564] env[62510]: DEBUG nova.network.neutron [req-43c96342-0894-4138-b64e-4e8ab0c0bacb req-dbaae4af-0680-4b4a-84b1-fa4e4a5fd81f service nova] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Updating instance_info_cache with network_info: [{"id": "89aed103-9d06-4efa-9cf0-72267b06a41d", "address": "fa:16:3e:51:a6:e8", "network": {"id": "26c6d309-4ec8-47ff-ad17-f7ec54141444", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1250434407-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aef2814e748c4e8d8dfb96d426bf9727", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da0e5087-d65b-416f-90fe-beaa9c534ad3", "external-id": "nsx-vlan-transportzone-522", "segmentation_id": 522, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89aed103-9d", "ovs_interfaceid": "89aed103-9d06-4efa-9cf0-72267b06a41d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1789.892337] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1789.892681] env[62510]: DEBUG 
oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-59b91eca-bb1b-46c8-8b69-347abae6ea34 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.906606] env[62510]: DEBUG oslo_vmware.api [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1789.906606] env[62510]: value = "task-1769188" [ 1789.906606] env[62510]: _type = "Task" [ 1789.906606] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.923211] env[62510]: DEBUG oslo_vmware.api [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769188, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.958499] env[62510]: INFO nova.compute.manager [None req-fab329b8-2e25-42a4-9437-e077c5825a73 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] instance snapshotting [ 1789.967013] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fa0cdb1-fc08-4801-8eb0-ca8771b29207 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.995170] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769186, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.010141] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ee8abec-1241-4398-92e1-4bb302b07258 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.013850] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1790.014092] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Starting heal instance info cache {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 1790.038995] env[62510]: DEBUG oslo_vmware.api [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769187, 'name': CloneVM_Task} progress is 94%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.149445] env[62510]: DEBUG oslo_vmware.api [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': task-1769183, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.645801} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.149445] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] f40078f0-af6b-480b-96e6-4117022c87e2/f40078f0-af6b-480b-96e6-4117022c87e2.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1790.149445] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1790.149445] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2c75594b-5d72-46cd-b042-f280cdee03e8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.156097] env[62510]: DEBUG oslo_vmware.api [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Waiting for the task: (returnval){ [ 1790.156097] env[62510]: value = "task-1769189" [ 1790.156097] env[62510]: _type = "Task" [ 1790.156097] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.167383] env[62510]: DEBUG oslo_vmware.api [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': task-1769189, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.217461] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdf0de46-e62b-4244-9adf-bf7f2d25fe19 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.227021] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0553269-eb97-4852-a82d-5c45ecdba50b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.266948] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61b6ddba-997b-4ceb-904e-f9870c6105da {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.276194] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84f6fb9d-31c6-495f-a78e-a4ee66a45c77 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.293747] env[62510]: DEBUG nova.compute.provider_tree [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1790.348152] env[62510]: DEBUG oslo_concurrency.lockutils [req-43c96342-0894-4138-b64e-4e8ab0c0bacb req-dbaae4af-0680-4b4a-84b1-fa4e4a5fd81f service nova] Releasing lock "refresh_cache-1dc9e3b6-5e75-49b4-aef0-01200fb9be47" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1790.374106] env[62510]: DEBUG oslo_concurrency.lockutils [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Acquiring lock "dabc046f-10f5-43d8-90f8-507dcb4d0144" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1790.374106] env[62510]: DEBUG oslo_concurrency.lockutils [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Lock "dabc046f-10f5-43d8-90f8-507dcb4d0144" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1790.374106] env[62510]: DEBUG oslo_concurrency.lockutils [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Acquiring lock "dabc046f-10f5-43d8-90f8-507dcb4d0144-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1790.374106] env[62510]: DEBUG oslo_concurrency.lockutils [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Lock 
"dabc046f-10f5-43d8-90f8-507dcb4d0144-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1790.374417] env[62510]: DEBUG oslo_concurrency.lockutils [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Lock "dabc046f-10f5-43d8-90f8-507dcb4d0144-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1790.376113] env[62510]: INFO nova.compute.manager [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Terminating instance [ 1790.418395] env[62510]: DEBUG oslo_vmware.api [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769188, 'name': PowerOffVM_Task, 'duration_secs': 0.296776} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.418670] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1790.419546] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60175531-2b20-4e73-99da-34fa7159183d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.439047] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6eb33e5-719b-42b3-9ac7-74905a0daf07 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.473864] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769186, 'name': CreateVM_Task, 'duration_secs': 0.591981} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.475651] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1790.475848] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1790.476078] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2f4a30f1-e4b6-4332-a43d-af0c38da5d2b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.478142] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1790.478245] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1790.478553] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1790.479133] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1e4d105-a33a-4eb5-a05e-d4c93920c67d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.484614] env[62510]: DEBUG oslo_vmware.api [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Waiting for the task: (returnval){ [ 1790.484614] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52adb3dd-eaf9-043e-234b-d20c02a79480" [ 1790.484614] env[62510]: _type = "Task" [ 1790.484614] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.489124] env[62510]: DEBUG oslo_vmware.api [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1790.489124] env[62510]: value = "task-1769190" [ 1790.489124] env[62510]: _type = "Task" [ 1790.489124] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.495939] env[62510]: DEBUG oslo_vmware.api [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52adb3dd-eaf9-043e-234b-d20c02a79480, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.503146] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] VM already powered off {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1790.503381] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1790.503631] env[62510]: DEBUG oslo_concurrency.lockutils [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1790.503830] env[62510]: DEBUG oslo_concurrency.lockutils [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1790.503963] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1790.504574] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a91f88a8-3881-4050-bf2c-3ee8395591df {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.514981] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1790.514981] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1790.515998] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25256d39-5b60-483b-b33d-06c46de207b5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.529580] env[62510]: DEBUG oslo_vmware.api [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1790.529580] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5224db89-07c9-fe5c-743f-9859dcc89fde" [ 1790.529580] env[62510]: _type = "Task" [ 1790.529580] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.534941] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-fab329b8-2e25-42a4-9437-e077c5825a73 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Creating Snapshot of the VM instance {{(pid=62510) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1790.535329] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-464a1cf4-d97e-4879-b63b-3ac6c7500d61 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.545118] env[62510]: DEBUG oslo_vmware.api [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769187, 'name': CloneVM_Task} progress is 94%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.551036] env[62510]: DEBUG oslo_vmware.api [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5224db89-07c9-fe5c-743f-9859dcc89fde, 'name': SearchDatastore_Task, 'duration_secs': 0.016346} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.551224] env[62510]: DEBUG oslo_vmware.api [None req-fab329b8-2e25-42a4-9437-e077c5825a73 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for the task: (returnval){ [ 1790.551224] env[62510]: value = "task-1769191" [ 1790.551224] env[62510]: _type = "Task" [ 1790.551224] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.552360] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d5aa47b-6c65-49af-a50f-6f9bbb7cbb22 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.563169] env[62510]: DEBUG oslo_vmware.api [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1790.563169] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52516971-618e-2bbf-05da-f12bf921b227" [ 1790.563169] env[62510]: _type = "Task" [ 1790.563169] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.566314] env[62510]: DEBUG oslo_vmware.api [None req-fab329b8-2e25-42a4-9437-e077c5825a73 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769191, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.574915] env[62510]: DEBUG oslo_vmware.api [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52516971-618e-2bbf-05da-f12bf921b227, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.667389] env[62510]: DEBUG oslo_vmware.api [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': task-1769189, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094509} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.667683] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1790.668719] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bb827a2-1eed-459b-a727-0025d4e76d96 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.693154] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] f40078f0-af6b-480b-96e6-4117022c87e2/f40078f0-af6b-480b-96e6-4117022c87e2.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1790.693801] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8fea42c5-edaa-43e2-a37b-427c3f600438 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.724731] env[62510]: DEBUG oslo_vmware.api [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Waiting for the task: (returnval){ [ 1790.724731] env[62510]: value = "task-1769192" [ 1790.724731] env[62510]: _type = "Task" [ 1790.724731] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.734037] env[62510]: DEBUG oslo_vmware.api [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': task-1769192, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.768633] env[62510]: DEBUG nova.compute.manager [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 31772dc9-4f04-42df-9e3b-3200cc72c977] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1790.795113] env[62510]: DEBUG nova.virt.hardware [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1790.795500] env[62510]: DEBUG nova.virt.hardware [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1790.795741] env[62510]: DEBUG nova.virt.hardware [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1790.796027] env[62510]: DEBUG nova.virt.hardware [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1790.796270] env[62510]: DEBUG nova.virt.hardware [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1790.796509] env[62510]: DEBUG nova.virt.hardware [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1790.796808] env[62510]: DEBUG nova.virt.hardware [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1790.797071] env[62510]: DEBUG nova.virt.hardware [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1790.797274] env[62510]: DEBUG nova.virt.hardware [None req-de2c86f1-bee2-4691-971c-6360906da1bb 
tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1790.797448] env[62510]: DEBUG nova.virt.hardware [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1790.797624] env[62510]: DEBUG nova.virt.hardware [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1790.798487] env[62510]: DEBUG nova.scheduler.client.report [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1790.802101] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7b9bb26-4a48-4d31-af88-0eb27385802c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.810718] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51373359-0c48-4ae4-8cef-a263647b8c7e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.825690] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 31772dc9-4f04-42df-9e3b-3200cc72c977] Instance VIF info [] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1790.831224] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Creating folder: Project (39878ac3e77f4f9fb36bafbe617a0b9c). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1790.831805] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-462b2eb5-30a3-4314-b44a-74dc76877f5e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.843470] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Created folder: Project (39878ac3e77f4f9fb36bafbe617a0b9c) in parent group-v367197. 
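The entries above and below all follow one pattern for a vCenter round-trip: nova issues a SOAP call through its oslo.vmware session (the "Invoking <Object>.<Method> with opID=oslo.vmware-..." lines from request_handler), and when the call returns a Task it polls it until completion (the "Waiting for the task ..." / "progress is N%" lines from wait_for_task/_poll_task). A minimal sketch of that pattern is shown below; the vCenter host, credentials, and managed-object references are placeholders, not values taken from this deployment.

    # Sketch of the invoke/poll pattern visible in the surrounding log lines.
    # Host, credentials and moref values are placeholders.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'vc1.example.invalid',          # placeholder vCenter host
        'administrator@vsphere.local',  # placeholder user
        'secret',                       # placeholder password
        api_retry_count=10,
        task_poll_interval=0.5)

    # Build a (deliberately minimal) VirtualMachineConfigSpec through the suds
    # factory exposed by the session's Vim client.
    factory = session.vim.client.factory
    config_spec = factory.create('ns0:VirtualMachineConfigSpec')
    config_spec.name = 'demo-instance'
    config_spec.files = factory.create('ns0:VirtualMachineFileInfo')
    config_spec.files.vmPathName = '[datastore1] demo-instance'

    # invoke_api() issues the SOAP request; oslo.vmware logs it as
    # "Invoking Folder.CreateVM_Task with opID=oslo.vmware-...".
    folder_ref = vim_util.get_moref('group-v12345', 'Folder')      # placeholder
    pool_ref = vim_util.get_moref('resgroup-1', 'ResourcePool')    # placeholder
    task_ref = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                                  config=config_spec, pool=pool_ref)

    # wait_for_task() polls the task object, producing the
    # "Task: {'id': task-..., 'name': CreateVM_Task} progress is N%" lines,
    # and returns the task info (whose .result is the new VM moref) on success.
    task_info = session.wait_for_task(task_ref)
    vm_ref = task_info.result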
[ 1790.843691] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Creating folder: Instances. Parent ref: group-v367426. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1790.843882] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-08120881-37b8-43db-af04-00b7593a9943 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.848960] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9720fe1f-a22c-4122-91d5-1ac32c853ec2 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "interface-241d842d-3dd5-4ac2-a18a-12b9c9fbd340-1bc2d7ec-858c-45a9-8966-8c35ee7ef110" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1790.849274] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9720fe1f-a22c-4122-91d5-1ac32c853ec2 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "interface-241d842d-3dd5-4ac2-a18a-12b9c9fbd340-1bc2d7ec-858c-45a9-8966-8c35ee7ef110" acquired by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1790.849642] env[62510]: DEBUG nova.objects.instance [None req-9720fe1f-a22c-4122-91d5-1ac32c853ec2 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lazy-loading 'flavor' on Instance uuid 241d842d-3dd5-4ac2-a18a-12b9c9fbd340 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1790.855600] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Created folder: Instances in parent group-v367426. [ 1790.855746] env[62510]: DEBUG oslo.service.loopingcall [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1790.855856] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31772dc9-4f04-42df-9e3b-3200cc72c977] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1790.856127] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cc45fcaf-4880-4f30-a67e-07ccc45a23eb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.875320] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1790.875320] env[62510]: value = "task-1769195" [ 1790.875320] env[62510]: _type = "Task" [ 1790.875320] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.879061] env[62510]: DEBUG nova.compute.manager [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1790.879355] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1790.880467] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3efb98ef-9636-4943-a28a-6cac11e54a84 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.885987] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769195, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.890670] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1790.890920] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0239105b-2a0e-42b6-a6ad-7948686280ab {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.897624] env[62510]: DEBUG oslo_vmware.api [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Waiting for the task: (returnval){ [ 1790.897624] env[62510]: value = "task-1769196" [ 1790.897624] env[62510]: _type = "Task" [ 1790.897624] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.906012] env[62510]: DEBUG oslo_vmware.api [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Task: {'id': task-1769196, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.995603] env[62510]: DEBUG oslo_vmware.api [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52adb3dd-eaf9-043e-234b-d20c02a79480, 'name': SearchDatastore_Task, 'duration_secs': 0.013171} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.995940] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1790.996232] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1790.996463] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1791.033996] env[62510]: DEBUG oslo_vmware.api [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769187, 'name': CloneVM_Task} progress is 95%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.062369] env[62510]: DEBUG oslo_vmware.api [None req-fab329b8-2e25-42a4-9437-e077c5825a73 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769191, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.067209] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "refresh_cache-4e735bb6-f167-4c2b-b44e-d2dd3040603d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1791.067289] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquired lock "refresh_cache-4e735bb6-f167-4c2b-b44e-d2dd3040603d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1791.067441] env[62510]: DEBUG nova.network.neutron [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Forcefully refreshing network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1791.077419] env[62510]: DEBUG oslo_vmware.api [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52516971-618e-2bbf-05da-f12bf921b227, 'name': SearchDatastore_Task, 'duration_secs': 0.012113} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.078249] env[62510]: DEBUG oslo_concurrency.lockutils [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1791.078511] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 8a230335-6388-45fb-a29e-9e63ddb4d5f2/645af513-c243-4722-b631-714f21477ae6-rescue.vmdk. {{(pid=62510) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1791.078802] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1791.079033] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1791.079268] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-84f67524-0731-4858-ab2f-21d722ccc2e6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.081445] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-73bb0b20-34a1-4265-a8ed-07b017b7477c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.092028] env[62510]: DEBUG oslo_vmware.api [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1791.092028] env[62510]: value = "task-1769197" [ 1791.092028] env[62510]: _type = "Task" [ 1791.092028] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.095624] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1791.095804] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1791.097051] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-718f83df-3613-4413-83e6-106fd22b79d4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.103431] env[62510]: DEBUG oslo_vmware.api [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769197, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.107421] env[62510]: DEBUG oslo_vmware.api [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Waiting for the task: (returnval){ [ 1791.107421] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52f3bdcb-8c01-f636-4683-740217c51c75" [ 1791.107421] env[62510]: _type = "Task" [ 1791.107421] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.117386] env[62510]: DEBUG oslo_vmware.api [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52f3bdcb-8c01-f636-4683-740217c51c75, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.237828] env[62510]: DEBUG oslo_vmware.api [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': task-1769192, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.306360] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.593s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1791.307121] env[62510]: DEBUG nova.compute.manager [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1791.310193] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0a8190a5-cf1f-404d-b054-1f1da4b2c071 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.176s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1791.310584] env[62510]: DEBUG nova.objects.instance [None req-0a8190a5-cf1f-404d-b054-1f1da4b2c071 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lazy-loading 'resources' on Instance uuid 83fa0d32-18ee-401d-af0b-a0adb538e5f4 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1791.394087] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769195, 'name': CreateVM_Task, 'duration_secs': 0.401766} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.394351] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31772dc9-4f04-42df-9e3b-3200cc72c977] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1791.394818] env[62510]: DEBUG oslo_concurrency.lockutils [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1791.396391] env[62510]: DEBUG oslo_concurrency.lockutils [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1791.396809] env[62510]: DEBUG oslo_concurrency.lockutils [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1791.397217] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0fb5156b-89f3-4395-9dc1-e2de5363d173 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.407856] env[62510]: DEBUG oslo_vmware.api [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Waiting for the task: (returnval){ [ 1791.407856] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52ddf41f-712b-48a0-2b04-c50e6ef895ae" [ 1791.407856] env[62510]: _type = "Task" [ 1791.407856] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.411674] env[62510]: DEBUG oslo_vmware.api [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Task: {'id': task-1769196, 'name': PowerOffVM_Task, 'duration_secs': 0.331217} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.415453] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1791.415755] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1791.416091] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6eae9fdf-c30b-464e-853d-2af9fe2656a7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.427828] env[62510]: DEBUG oslo_vmware.api [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52ddf41f-712b-48a0-2b04-c50e6ef895ae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.513317] env[62510]: DEBUG nova.objects.instance [None req-9720fe1f-a22c-4122-91d5-1ac32c853ec2 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lazy-loading 'pci_requests' on Instance uuid 241d842d-3dd5-4ac2-a18a-12b9c9fbd340 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1791.521066] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1791.522041] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1791.522041] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Deleting the datastore file [datastore1] dabc046f-10f5-43d8-90f8-507dcb4d0144 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1791.522041] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-71180d5f-53df-42ff-bd12-965ad0d6a571 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.540495] env[62510]: DEBUG oslo_vmware.api [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 
tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Waiting for the task: (returnval){ [ 1791.540495] env[62510]: value = "task-1769199" [ 1791.540495] env[62510]: _type = "Task" [ 1791.540495] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.544853] env[62510]: DEBUG oslo_vmware.api [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769187, 'name': CloneVM_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.557336] env[62510]: DEBUG oslo_vmware.api [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Task: {'id': task-1769199, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.568370] env[62510]: DEBUG oslo_vmware.api [None req-fab329b8-2e25-42a4-9437-e077c5825a73 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769191, 'name': CreateSnapshot_Task, 'duration_secs': 0.823081} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.568781] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-fab329b8-2e25-42a4-9437-e077c5825a73 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Created Snapshot of the VM instance {{(pid=62510) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1791.569615] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9724554e-fd23-4f5d-bea6-a2cdcb2c0f1d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.605971] env[62510]: DEBUG oslo_vmware.api [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769197, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.624009] env[62510]: DEBUG oslo_vmware.api [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52f3bdcb-8c01-f636-4683-740217c51c75, 'name': SearchDatastore_Task, 'duration_secs': 0.011976} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.627850] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7f09b03-faf7-426e-8ff3-6d3654fa91d2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.645738] env[62510]: DEBUG oslo_vmware.api [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Waiting for the task: (returnval){ [ 1791.645738] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]526ddbec-90cc-8fcf-777e-061a14e5ce98" [ 1791.645738] env[62510]: _type = "Task" [ 1791.645738] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.664783] env[62510]: DEBUG oslo_vmware.api [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]526ddbec-90cc-8fcf-777e-061a14e5ce98, 'name': SearchDatastore_Task, 'duration_secs': 0.015876} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.665049] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1791.665316] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 1dc9e3b6-5e75-49b4-aef0-01200fb9be47/1dc9e3b6-5e75-49b4-aef0-01200fb9be47.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1791.665575] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f215c2d8-4d46-414b-bed2-bd1f6696f5c7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.674703] env[62510]: DEBUG oslo_vmware.api [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Waiting for the task: (returnval){ [ 1791.674703] env[62510]: value = "task-1769200" [ 1791.674703] env[62510]: _type = "Task" [ 1791.674703] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.687733] env[62510]: DEBUG oslo_vmware.api [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Task: {'id': task-1769200, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.737445] env[62510]: DEBUG oslo_vmware.api [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': task-1769192, 'name': ReconfigVM_Task, 'duration_secs': 0.62996} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.737807] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Reconfigured VM instance instance-00000055 to attach disk [datastore1] f40078f0-af6b-480b-96e6-4117022c87e2/f40078f0-af6b-480b-96e6-4117022c87e2.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1791.738451] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d3b37075-00ff-4150-b030-9864474041e4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.747570] env[62510]: DEBUG oslo_vmware.api [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Waiting for the task: (returnval){ [ 1791.747570] env[62510]: value = "task-1769201" [ 1791.747570] env[62510]: _type = "Task" [ 1791.747570] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.765913] env[62510]: DEBUG oslo_vmware.api [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': task-1769201, 'name': Rename_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.814097] env[62510]: DEBUG nova.compute.utils [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1791.818438] env[62510]: DEBUG nova.compute.manager [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Not allocating networking since 'none' was specified. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1791.926023] env[62510]: DEBUG oslo_vmware.api [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52ddf41f-712b-48a0-2b04-c50e6ef895ae, 'name': SearchDatastore_Task, 'duration_secs': 0.059212} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.926439] env[62510]: DEBUG oslo_concurrency.lockutils [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1791.926718] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 31772dc9-4f04-42df-9e3b-3200cc72c977] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1791.927640] env[62510]: DEBUG oslo_concurrency.lockutils [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1791.927640] env[62510]: DEBUG oslo_concurrency.lockutils [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1791.927640] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1791.927836] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-adfcf6e8-17c0-4c47-9360-56962265c9ac {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.944241] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1791.944454] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1791.945311] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a507700e-2c81-4c37-9ee6-e4ec653b62d6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.955036] env[62510]: DEBUG oslo_vmware.api [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Waiting for the task: (returnval){ [ 1791.955036] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]521cf9db-de82-f552-5952-26691c3c5e73" [ 1791.955036] env[62510]: _type = "Task" [ 1791.955036] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.966402] env[62510]: DEBUG oslo_vmware.api [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]521cf9db-de82-f552-5952-26691c3c5e73, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.019144] env[62510]: DEBUG nova.objects.base [None req-9720fe1f-a22c-4122-91d5-1ac32c853ec2 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Object Instance<241d842d-3dd5-4ac2-a18a-12b9c9fbd340> lazy-loaded attributes: flavor,pci_requests {{(pid=62510) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1792.019477] env[62510]: DEBUG nova.network.neutron [None req-9720fe1f-a22c-4122-91d5-1ac32c853ec2 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1792.037780] env[62510]: DEBUG oslo_vmware.api [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769187, 'name': CloneVM_Task, 'duration_secs': 2.056591} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.040682] env[62510]: INFO nova.virt.vmwareapi.vmops [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Created linked-clone VM from snapshot [ 1792.041726] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85450f9e-667b-479f-ac1d-4599fa4e1325 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.052283] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Uploading image b835e517-0008-401a-949d-3c37f4e75c37 {{(pid=62510) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1792.060066] env[62510]: DEBUG oslo_vmware.api [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Task: {'id': task-1769199, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.223664} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.060618] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1792.060842] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1792.061089] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1792.061262] env[62510]: INFO nova.compute.manager [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1792.061533] env[62510]: DEBUG oslo.service.loopingcall [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1792.061752] env[62510]: DEBUG nova.compute.manager [-] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1792.061871] env[62510]: DEBUG nova.network.neutron [-] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1792.081244] env[62510]: DEBUG oslo_vmware.rw_handles [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1792.081244] env[62510]: value = "vm-367425" [ 1792.081244] env[62510]: _type = "VirtualMachine" [ 1792.081244] env[62510]: }. {{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1792.081537] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-4a850094-af56-4d3c-a919-fefeaf4ae566 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.094787] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-fab329b8-2e25-42a4-9437-e077c5825a73 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Creating linked-clone VM from snapshot {{(pid=62510) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1792.098076] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-accf9e49-3934-486a-b72e-48d9b7f536da {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.102022] env[62510]: DEBUG oslo_vmware.rw_handles [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Lease: (returnval){ [ 1792.102022] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52e8ee56-5d24-05a7-10a7-1462dac3f439" [ 1792.102022] env[62510]: _type = "HttpNfcLease" [ 1792.102022] env[62510]: } obtained for exporting VM: (result){ [ 1792.102022] env[62510]: value = "vm-367425" [ 1792.102022] env[62510]: _type = "VirtualMachine" [ 1792.102022] env[62510]: }. {{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1792.102465] env[62510]: DEBUG oslo_vmware.api [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Waiting for the lease: (returnval){ [ 1792.102465] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52e8ee56-5d24-05a7-10a7-1462dac3f439" [ 1792.102465] env[62510]: _type = "HttpNfcLease" [ 1792.102465] env[62510]: } to be ready. 
{{(pid=62510) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1792.114354] env[62510]: DEBUG oslo_vmware.api [None req-fab329b8-2e25-42a4-9437-e077c5825a73 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for the task: (returnval){ [ 1792.114354] env[62510]: value = "task-1769203" [ 1792.114354] env[62510]: _type = "Task" [ 1792.114354] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.126786] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1792.126786] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52e8ee56-5d24-05a7-10a7-1462dac3f439" [ 1792.126786] env[62510]: _type = "HttpNfcLease" [ 1792.126786] env[62510]: } is ready. {{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1792.127121] env[62510]: DEBUG oslo_vmware.api [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769197, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.534267} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.130035] env[62510]: DEBUG oslo_vmware.rw_handles [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1792.130035] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52e8ee56-5d24-05a7-10a7-1462dac3f439" [ 1792.130035] env[62510]: _type = "HttpNfcLease" [ 1792.130035] env[62510]: }. {{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1792.130313] env[62510]: INFO nova.virt.vmwareapi.ds_util [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 8a230335-6388-45fb-a29e-9e63ddb4d5f2/645af513-c243-4722-b631-714f21477ae6-rescue.vmdk. [ 1792.133615] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a721ce6-d784-419c-9973-4cf0e8e132d5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.137371] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36ce456a-ec81-454a-bd42-15f22fafe08c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.143318] env[62510]: DEBUG oslo_vmware.api [None req-fab329b8-2e25-42a4-9437-e077c5825a73 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769203, 'name': CloneVM_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.148580] env[62510]: DEBUG nova.policy [None req-9720fe1f-a22c-4122-91d5-1ac32c853ec2 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '112bb5174a71476f9aaa66e917fc135a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cca414b18f8d431786c155d359f1325d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1792.169275] env[62510]: DEBUG oslo_vmware.rw_handles [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52eccf11-0b55-45ac-3daa-a5a0bd7b61c3/disk-0.vmdk from lease info. {{(pid=62510) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1792.169483] env[62510]: DEBUG oslo_vmware.rw_handles [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52eccf11-0b55-45ac-3daa-a5a0bd7b61c3/disk-0.vmdk for reading. {{(pid=62510) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1792.178363] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] 8a230335-6388-45fb-a29e-9e63ddb4d5f2/645af513-c243-4722-b631-714f21477ae6-rescue.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1792.182012] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cca77875-3151-44db-b63c-f1b52278b273 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.265621] env[62510]: DEBUG oslo_vmware.api [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1792.265621] env[62510]: value = "task-1769204" [ 1792.265621] env[62510]: _type = "Task" [ 1792.265621] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.272807] env[62510]: DEBUG oslo_vmware.api [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Task: {'id': task-1769200, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.466766} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.273101] env[62510]: DEBUG oslo_vmware.api [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': task-1769201, 'name': Rename_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.276586] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 1dc9e3b6-5e75-49b4-aef0-01200fb9be47/1dc9e3b6-5e75-49b4-aef0-01200fb9be47.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1792.277309] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1792.279563] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-df96247e-787b-4ae9-a693-d107ff8f4eb3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.289405] env[62510]: DEBUG oslo_vmware.api [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769204, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.290879] env[62510]: DEBUG oslo_vmware.api [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Waiting for the task: (returnval){ [ 1792.290879] env[62510]: value = "task-1769205" [ 1792.290879] env[62510]: _type = "Task" [ 1792.290879] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.297764] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fecb662e-2158-489e-a5c6-ff992ea49362 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.308232] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-91af1ede-b8cc-4fbb-831b-df91272c506f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.310237] env[62510]: DEBUG oslo_vmware.api [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Task: {'id': task-1769205, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.313934] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf727284-d5ac-4e56-a714-a6b7749898e8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.318937] env[62510]: DEBUG nova.compute.manager [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1792.355335] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b96c7019-b7a9-4cd6-b09f-17d539e39ac7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.367357] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc56e1c2-6746-4b68-836e-a4450b61e77d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.384348] env[62510]: DEBUG nova.compute.provider_tree [None req-0a8190a5-cf1f-404d-b054-1f1da4b2c071 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1792.466642] env[62510]: DEBUG oslo_vmware.api [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]521cf9db-de82-f552-5952-26691c3c5e73, 'name': SearchDatastore_Task, 'duration_secs': 0.054504} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.467148] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-032ba4fa-0c97-4e6e-a69c-9c9056725c0f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.473876] env[62510]: DEBUG oslo_vmware.api [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Waiting for the task: (returnval){ [ 1792.473876] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5254cceb-93e0-a3c6-b705-946955d9cf06" [ 1792.473876] env[62510]: _type = "Task" [ 1792.473876] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.482857] env[62510]: DEBUG oslo_vmware.api [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5254cceb-93e0-a3c6-b705-946955d9cf06, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.485129] env[62510]: DEBUG nova.network.neutron [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Updating instance_info_cache with network_info: [{"id": "a6e31bab-0459-42fe-8756-d37cc3fa3e88", "address": "fa:16:3e:7d:cb:3f", "network": {"id": "9b209a99-520e-436f-be97-fe37ae505518", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1482163995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86abf24d608d4c438161dc0b8335dea1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6e31bab-04", "ovs_interfaceid": "a6e31bab-0459-42fe-8756-d37cc3fa3e88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1792.624573] env[62510]: DEBUG oslo_vmware.api [None req-fab329b8-2e25-42a4-9437-e077c5825a73 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769203, 'name': CloneVM_Task} progress is 93%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.766608] env[62510]: DEBUG oslo_vmware.api [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': task-1769201, 'name': Rename_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.784527] env[62510]: DEBUG oslo_vmware.api [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769204, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.800895] env[62510]: DEBUG oslo_vmware.api [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Task: {'id': task-1769205, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080053} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.801245] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1792.802084] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82f665ac-f4c9-4998-a62e-8c371cf086bc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.826676] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] 1dc9e3b6-5e75-49b4-aef0-01200fb9be47/1dc9e3b6-5e75-49b4-aef0-01200fb9be47.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1792.830830] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d78e1f36-49a7-4f10-8e74-ef1dbdc84bd5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.853889] env[62510]: DEBUG oslo_vmware.api [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Waiting for the task: (returnval){ [ 1792.853889] env[62510]: value = "task-1769206" [ 1792.853889] env[62510]: _type = "Task" [ 1792.853889] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.864612] env[62510]: DEBUG oslo_vmware.api [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Task: {'id': task-1769206, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.893838] env[62510]: DEBUG nova.scheduler.client.report [None req-0a8190a5-cf1f-404d-b054-1f1da4b2c071 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1792.987102] env[62510]: DEBUG oslo_vmware.api [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5254cceb-93e0-a3c6-b705-946955d9cf06, 'name': SearchDatastore_Task, 'duration_secs': 0.021319} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.987620] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Releasing lock "refresh_cache-4e735bb6-f167-4c2b-b44e-d2dd3040603d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1792.987963] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Updated the network info_cache for instance {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10380}} [ 1792.988261] env[62510]: DEBUG oslo_concurrency.lockutils [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1792.988530] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 31772dc9-4f04-42df-9e3b-3200cc72c977/31772dc9-4f04-42df-9e3b-3200cc72c977.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1792.988804] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1792.989019] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-82422a44-0b23-4f55-87cc-0793c1b24bda {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.991477] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e 
None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1792.992806] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1792.992806] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1792.992806] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1792.993306] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1792.993306] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62510) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 1792.993306] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1792.999834] env[62510]: DEBUG oslo_vmware.api [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Waiting for the task: (returnval){ [ 1792.999834] env[62510]: value = "task-1769207" [ 1792.999834] env[62510]: _type = "Task" [ 1792.999834] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.014806] env[62510]: DEBUG oslo_vmware.api [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769207, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.089539] env[62510]: DEBUG nova.network.neutron [-] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1793.126247] env[62510]: DEBUG oslo_vmware.api [None req-fab329b8-2e25-42a4-9437-e077c5825a73 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769203, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.189526] env[62510]: DEBUG nova.compute.manager [req-5b1b1ecd-d6c8-4015-ad64-c17d8fe8f41e req-96bfed23-9ad7-46e1-88bb-9528a60d3738 service nova] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Received event network-vif-deleted-d6cdea66-2edf-49fa-9c50-5293d0a33351 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1793.267275] env[62510]: DEBUG oslo_vmware.api [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': task-1769201, 'name': Rename_Task, 'duration_secs': 1.423499} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1793.267618] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1793.267982] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-19f8e844-6408-494c-8790-d37e0a70fb90 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.278655] env[62510]: DEBUG oslo_vmware.api [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Waiting for the task: (returnval){ [ 1793.278655] env[62510]: value = "task-1769208" [ 1793.278655] env[62510]: _type = "Task" [ 1793.278655] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.285939] env[62510]: DEBUG oslo_vmware.api [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769204, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.291606] env[62510]: DEBUG oslo_vmware.api [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': task-1769208, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.350055] env[62510]: DEBUG nova.compute.manager [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1793.365997] env[62510]: DEBUG oslo_vmware.api [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Task: {'id': task-1769206, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.383527] env[62510]: DEBUG nova.virt.hardware [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1793.383859] env[62510]: DEBUG nova.virt.hardware [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1793.384193] env[62510]: DEBUG nova.virt.hardware [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1793.384450] env[62510]: DEBUG nova.virt.hardware [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1793.384592] env[62510]: DEBUG nova.virt.hardware [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1793.384759] env[62510]: DEBUG nova.virt.hardware [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1793.385101] env[62510]: DEBUG nova.virt.hardware [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1793.385938] env[62510]: DEBUG nova.virt.hardware [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1793.385938] env[62510]: DEBUG nova.virt.hardware [None 
req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1793.385938] env[62510]: DEBUG nova.virt.hardware [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1793.386145] env[62510]: DEBUG nova.virt.hardware [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1793.387074] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-349eba6e-9c86-4f37-bdf1-71ecffd43b17 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.397904] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a5b203b-1d8b-4a51-84d7-07221f642147 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.403337] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0a8190a5-cf1f-404d-b054-1f1da4b2c071 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.093s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1793.406472] env[62510]: DEBUG oslo_concurrency.lockutils [None req-02edb596-2058-4757-92c6-7ae260418478 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 17.863s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1793.420415] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Instance VIF info [] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1793.427863] env[62510]: DEBUG oslo.service.loopingcall [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1793.429109] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1793.430232] env[62510]: INFO nova.scheduler.client.report [None req-0a8190a5-cf1f-404d-b054-1f1da4b2c071 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Deleted allocations for instance 83fa0d32-18ee-401d-af0b-a0adb538e5f4 [ 1793.431324] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a2dd1ee6-9827-45e3-8ae7-4c0294312334 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.460153] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1793.460153] env[62510]: value = "task-1769209" [ 1793.460153] env[62510]: _type = "Task" [ 1793.460153] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.482143] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769209, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.497191] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1793.514257] env[62510]: DEBUG oslo_vmware.api [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769207, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.594609] env[62510]: INFO nova.compute.manager [-] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Took 1.53 seconds to deallocate network for instance. [ 1793.628773] env[62510]: DEBUG oslo_vmware.api [None req-fab329b8-2e25-42a4-9437-e077c5825a73 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769203, 'name': CloneVM_Task} progress is 94%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.785828] env[62510]: DEBUG oslo_vmware.api [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769204, 'name': ReconfigVM_Task, 'duration_secs': 1.343795} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1793.786807] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Reconfigured VM instance instance-00000053 to attach disk [datastore1] 8a230335-6388-45fb-a29e-9e63ddb4d5f2/645af513-c243-4722-b631-714f21477ae6-rescue.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1793.787910] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cef9c879-a558-4c80-a4d5-a24cfb1be12b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.794460] env[62510]: DEBUG oslo_vmware.api [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': task-1769208, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.826059] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b4976ff4-e00b-4f53-a039-0590427e3c92 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.845334] env[62510]: DEBUG oslo_vmware.api [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1793.845334] env[62510]: value = "task-1769210" [ 1793.845334] env[62510]: _type = "Task" [ 1793.845334] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.855143] env[62510]: DEBUG oslo_vmware.api [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769210, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.865766] env[62510]: DEBUG oslo_vmware.api [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Task: {'id': task-1769206, 'name': ReconfigVM_Task, 'duration_secs': 0.672793} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1793.866275] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Reconfigured VM instance instance-00000056 to attach disk [datastore1] 1dc9e3b6-5e75-49b4-aef0-01200fb9be47/1dc9e3b6-5e75-49b4-aef0-01200fb9be47.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1793.867121] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-872492d8-b087-4b68-8475-ec7d25473b08 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.877039] env[62510]: DEBUG oslo_vmware.api [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Waiting for the task: (returnval){ [ 1793.877039] env[62510]: value = "task-1769211" [ 1793.877039] env[62510]: _type = "Task" [ 1793.877039] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.888567] env[62510]: DEBUG oslo_vmware.api [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Task: {'id': task-1769211, 'name': Rename_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.957035] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0a8190a5-cf1f-404d-b054-1f1da4b2c071 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lock "83fa0d32-18ee-401d-af0b-a0adb538e5f4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.304s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1793.973427] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769209, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.014513] env[62510]: DEBUG oslo_vmware.api [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769207, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.791271} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.015282] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 31772dc9-4f04-42df-9e3b-3200cc72c977/31772dc9-4f04-42df-9e3b-3200cc72c977.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1794.017483] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 31772dc9-4f04-42df-9e3b-3200cc72c977] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1794.017483] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cd59b947-e5e3-48bf-b703-6de7535f1334 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.029852] env[62510]: DEBUG oslo_vmware.api [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Waiting for the task: (returnval){ [ 1794.029852] env[62510]: value = "task-1769212" [ 1794.029852] env[62510]: _type = "Task" [ 1794.029852] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.043926] env[62510]: DEBUG oslo_vmware.api [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769212, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.103756] env[62510]: DEBUG oslo_concurrency.lockutils [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1794.131703] env[62510]: DEBUG oslo_vmware.api [None req-fab329b8-2e25-42a4-9437-e077c5825a73 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769203, 'name': CloneVM_Task} progress is 100%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.186368] env[62510]: DEBUG nova.network.neutron [None req-9720fe1f-a22c-4122-91d5-1ac32c853ec2 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Successfully updated port: 1bc2d7ec-858c-45a9-8966-8c35ee7ef110 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1794.290391] env[62510]: DEBUG oslo_vmware.api [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': task-1769208, 'name': PowerOnVM_Task, 'duration_secs': 0.724371} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.293306] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1794.293635] env[62510]: INFO nova.compute.manager [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Took 10.49 seconds to spawn the instance on the hypervisor. [ 1794.293880] env[62510]: DEBUG nova.compute.manager [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1794.294985] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1192a926-564b-4016-95c1-259d06a84c56 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.343695] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd089d85-f861-4aee-8b80-2d6f41924ecf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.358160] env[62510]: DEBUG oslo_vmware.api [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769210, 'name': ReconfigVM_Task, 'duration_secs': 0.239421} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.358979] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1794.360123] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41d4bcf3-62df-4ec8-bfe7-9ba406eb635a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.363264] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-91b20011-aa1f-48f5-b1ee-07e5dc4901d4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.404567] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e07b667-43df-4bbb-bb02-51fe7d47854b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.408216] env[62510]: DEBUG oslo_vmware.api [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1794.408216] env[62510]: value = "task-1769213" [ 1794.408216] env[62510]: _type = "Task" [ 1794.408216] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.417733] env[62510]: DEBUG oslo_vmware.api [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Task: {'id': task-1769211, 'name': Rename_Task, 'duration_secs': 0.241849} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.419618] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8726fcf-971f-444f-bfa6-4b3f5f029f1d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.424055] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1794.427394] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c18af226-0dbe-4599-9a04-7df5365f77e8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.429068] env[62510]: DEBUG oslo_vmware.api [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769213, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.439949] env[62510]: DEBUG nova.compute.provider_tree [None req-02edb596-2058-4757-92c6-7ae260418478 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1794.442674] env[62510]: DEBUG oslo_vmware.api [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Waiting for the task: (returnval){ [ 1794.442674] env[62510]: value = "task-1769214" [ 1794.442674] env[62510]: _type = "Task" [ 1794.442674] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.451713] env[62510]: DEBUG oslo_vmware.api [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Task: {'id': task-1769214, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.472540] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769209, 'name': CreateVM_Task, 'duration_secs': 0.617967} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.472753] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1794.473222] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1794.473395] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1794.473739] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1794.474034] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0697aef4-4518-49e4-9687-6e6f03015ecd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.479756] env[62510]: DEBUG oslo_vmware.api [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Waiting for the task: (returnval){ [ 1794.479756] env[62510]: 
value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52530511-a5cb-d32f-f875-c94378ba2c44" [ 1794.479756] env[62510]: _type = "Task" [ 1794.479756] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.488878] env[62510]: DEBUG oslo_vmware.api [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52530511-a5cb-d32f-f875-c94378ba2c44, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.542658] env[62510]: DEBUG oslo_vmware.api [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769212, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.09142} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.543041] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 31772dc9-4f04-42df-9e3b-3200cc72c977] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1794.544052] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c204663-a545-483d-9ebd-147ca261e02c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.565907] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 31772dc9-4f04-42df-9e3b-3200cc72c977] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] 31772dc9-4f04-42df-9e3b-3200cc72c977/31772dc9-4f04-42df-9e3b-3200cc72c977.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1794.566248] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0303dd61-1c51-412d-bedd-50a4968d9217 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.587761] env[62510]: DEBUG oslo_vmware.api [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Waiting for the task: (returnval){ [ 1794.587761] env[62510]: value = "task-1769215" [ 1794.587761] env[62510]: _type = "Task" [ 1794.587761] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.600959] env[62510]: DEBUG oslo_vmware.api [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769215, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.627758] env[62510]: DEBUG oslo_vmware.api [None req-fab329b8-2e25-42a4-9437-e077c5825a73 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769203, 'name': CloneVM_Task, 'duration_secs': 2.135285} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.627758] env[62510]: INFO nova.virt.vmwareapi.vmops [None req-fab329b8-2e25-42a4-9437-e077c5825a73 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Created linked-clone VM from snapshot [ 1794.628585] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b936a039-80c9-4711-b344-75c2c88033e3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.638948] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-fab329b8-2e25-42a4-9437-e077c5825a73 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Uploading image e9950cfb-6366-4124-8a28-ec0e21499f71 {{(pid=62510) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1794.651163] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-fab329b8-2e25-42a4-9437-e077c5825a73 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Destroying the VM {{(pid=62510) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1794.651559] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-c320b452-e26c-41d7-ac50-385332b45ab8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.660662] env[62510]: DEBUG oslo_vmware.api [None req-fab329b8-2e25-42a4-9437-e077c5825a73 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for the task: (returnval){ [ 1794.660662] env[62510]: value = "task-1769216" [ 1794.660662] env[62510]: _type = "Task" [ 1794.660662] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.669786] env[62510]: DEBUG oslo_vmware.api [None req-fab329b8-2e25-42a4-9437-e077c5825a73 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769216, 'name': Destroy_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.692201] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9720fe1f-a22c-4122-91d5-1ac32c853ec2 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "refresh_cache-241d842d-3dd5-4ac2-a18a-12b9c9fbd340" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1794.692201] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9720fe1f-a22c-4122-91d5-1ac32c853ec2 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquired lock "refresh_cache-241d842d-3dd5-4ac2-a18a-12b9c9fbd340" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1794.692468] env[62510]: DEBUG nova.network.neutron [None req-9720fe1f-a22c-4122-91d5-1ac32c853ec2 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1794.817794] env[62510]: INFO nova.compute.manager [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Took 28.58 seconds to build instance. [ 1794.919572] env[62510]: DEBUG oslo_vmware.api [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769213, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.945090] env[62510]: DEBUG nova.scheduler.client.report [None req-02edb596-2058-4757-92c6-7ae260418478 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1794.962694] env[62510]: DEBUG oslo_vmware.api [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Task: {'id': task-1769214, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.993467] env[62510]: DEBUG oslo_vmware.api [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52530511-a5cb-d32f-f875-c94378ba2c44, 'name': SearchDatastore_Task, 'duration_secs': 0.011969} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.993902] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1794.994341] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1794.994587] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1794.994804] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1794.995116] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1794.995483] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2c1b4ad3-47af-4605-a64d-5157ed48a410 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.006485] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1795.006685] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1795.007501] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b99a91e-98bd-496c-b5bb-6c5c2058328e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.013464] env[62510]: DEBUG oslo_vmware.api [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Waiting for the task: (returnval){ [ 1795.013464] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]529d163a-53e6-b91d-afb8-2d85c9e4d12e" [ 1795.013464] env[62510]: _type = "Task" [ 1795.013464] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.022934] env[62510]: DEBUG oslo_vmware.api [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]529d163a-53e6-b91d-afb8-2d85c9e4d12e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.099353] env[62510]: DEBUG oslo_vmware.api [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769215, 'name': ReconfigVM_Task, 'duration_secs': 0.500551} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.099685] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 31772dc9-4f04-42df-9e3b-3200cc72c977] Reconfigured VM instance instance-00000057 to attach disk [datastore1] 31772dc9-4f04-42df-9e3b-3200cc72c977/31772dc9-4f04-42df-9e3b-3200cc72c977.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1795.100361] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-249a8b9e-21b9-4af8-95c1-02fd179e07ea {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.109242] env[62510]: DEBUG oslo_vmware.api [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Waiting for the task: (returnval){ [ 1795.109242] env[62510]: value = "task-1769217" [ 1795.109242] env[62510]: _type = "Task" [ 1795.109242] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.118294] env[62510]: DEBUG oslo_vmware.api [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769217, 'name': Rename_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.172470] env[62510]: DEBUG oslo_vmware.api [None req-fab329b8-2e25-42a4-9437-e077c5825a73 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769216, 'name': Destroy_Task} progress is 33%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.274870] env[62510]: WARNING nova.network.neutron [None req-9720fe1f-a22c-4122-91d5-1ac32c853ec2 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] 940be04f-b555-4383-aaf8-63734d94a773 already exists in list: networks containing: ['940be04f-b555-4383-aaf8-63734d94a773']. ignoring it [ 1795.275203] env[62510]: WARNING nova.network.neutron [None req-9720fe1f-a22c-4122-91d5-1ac32c853ec2 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] 940be04f-b555-4383-aaf8-63734d94a773 already exists in list: networks containing: ['940be04f-b555-4383-aaf8-63734d94a773']. ignoring it [ 1795.320777] env[62510]: DEBUG oslo_concurrency.lockutils [None req-979c9209-399f-42b3-942e-976acd3497ec tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Lock "f40078f0-af6b-480b-96e6-4117022c87e2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.090s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1795.421942] env[62510]: DEBUG oslo_vmware.api [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769213, 'name': PowerOnVM_Task, 'duration_secs': 0.558049} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.422230] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1795.425305] env[62510]: DEBUG nova.compute.manager [None req-15d7244c-f7b8-44dd-bfc1-f30d309b9524 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1795.426154] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29c579ee-21e4-4666-8472-7e4df6113b1a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.462848] env[62510]: DEBUG oslo_vmware.api [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Task: {'id': task-1769214, 'name': PowerOnVM_Task, 'duration_secs': 0.846685} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.463341] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1795.466022] env[62510]: INFO nova.compute.manager [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Took 9.11 seconds to spawn the instance on the hypervisor. [ 1795.466022] env[62510]: DEBUG nova.compute.manager [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1795.467223] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79dba889-f853-4a7e-b4cb-047a584b4f0a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.530589] env[62510]: DEBUG oslo_vmware.api [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]529d163a-53e6-b91d-afb8-2d85c9e4d12e, 'name': SearchDatastore_Task, 'duration_secs': 0.011913} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.531540] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1bbb00d4-437c-4f0e-8703-c5bd31063d49 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.546337] env[62510]: DEBUG oslo_vmware.api [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Waiting for the task: (returnval){ [ 1795.546337] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52c5af97-098b-8611-448f-418f58088646" [ 1795.546337] env[62510]: _type = "Task" [ 1795.546337] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.558929] env[62510]: DEBUG oslo_vmware.api [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52c5af97-098b-8611-448f-418f58088646, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.624024] env[62510]: DEBUG oslo_vmware.api [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769217, 'name': Rename_Task, 'duration_secs': 0.285911} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.624024] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 31772dc9-4f04-42df-9e3b-3200cc72c977] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1795.624024] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-da3b4e37-3b08-4730-8fa3-f76bd702075b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.630856] env[62510]: DEBUG oslo_vmware.api [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Waiting for the task: (returnval){ [ 1795.630856] env[62510]: value = "task-1769218" [ 1795.630856] env[62510]: _type = "Task" [ 1795.630856] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.639840] env[62510]: DEBUG oslo_vmware.api [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769218, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.673169] env[62510]: DEBUG oslo_vmware.api [None req-fab329b8-2e25-42a4-9437-e077c5825a73 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769216, 'name': Destroy_Task, 'duration_secs': 0.723204} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.673956] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-fab329b8-2e25-42a4-9437-e077c5825a73 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Destroyed the VM [ 1795.673956] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-fab329b8-2e25-42a4-9437-e077c5825a73 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Deleting Snapshot of the VM instance {{(pid=62510) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1795.673956] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-35386161-407a-4260-9505-4c021fa49fda {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.682219] env[62510]: DEBUG oslo_vmware.api [None req-fab329b8-2e25-42a4-9437-e077c5825a73 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for the task: (returnval){ [ 1795.682219] env[62510]: value = "task-1769219" [ 1795.682219] env[62510]: _type = "Task" [ 1795.682219] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.694294] env[62510]: DEBUG oslo_vmware.api [None req-fab329b8-2e25-42a4-9437-e077c5825a73 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769219, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.947588] env[62510]: DEBUG nova.compute.manager [req-9d2019e4-984c-4d44-8f89-cf4b01626a3e req-5e42e3dd-e9f5-4b29-b6f1-58c918210d4d service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Received event network-vif-plugged-1bc2d7ec-858c-45a9-8966-8c35ee7ef110 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1795.947588] env[62510]: DEBUG oslo_concurrency.lockutils [req-9d2019e4-984c-4d44-8f89-cf4b01626a3e req-5e42e3dd-e9f5-4b29-b6f1-58c918210d4d service nova] Acquiring lock "241d842d-3dd5-4ac2-a18a-12b9c9fbd340-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1795.947588] env[62510]: DEBUG oslo_concurrency.lockutils [req-9d2019e4-984c-4d44-8f89-cf4b01626a3e req-5e42e3dd-e9f5-4b29-b6f1-58c918210d4d service nova] Lock "241d842d-3dd5-4ac2-a18a-12b9c9fbd340-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1795.947722] env[62510]: DEBUG oslo_concurrency.lockutils [req-9d2019e4-984c-4d44-8f89-cf4b01626a3e req-5e42e3dd-e9f5-4b29-b6f1-58c918210d4d service nova] Lock "241d842d-3dd5-4ac2-a18a-12b9c9fbd340-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1795.947881] env[62510]: DEBUG nova.compute.manager [req-9d2019e4-984c-4d44-8f89-cf4b01626a3e req-5e42e3dd-e9f5-4b29-b6f1-58c918210d4d service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] No waiting events found dispatching network-vif-plugged-1bc2d7ec-858c-45a9-8966-8c35ee7ef110 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1795.948282] env[62510]: WARNING nova.compute.manager [req-9d2019e4-984c-4d44-8f89-cf4b01626a3e req-5e42e3dd-e9f5-4b29-b6f1-58c918210d4d service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Received unexpected event network-vif-plugged-1bc2d7ec-858c-45a9-8966-8c35ee7ef110 for instance with vm_state active and task_state None. [ 1795.948499] env[62510]: DEBUG nova.compute.manager [req-9d2019e4-984c-4d44-8f89-cf4b01626a3e req-5e42e3dd-e9f5-4b29-b6f1-58c918210d4d service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Received event network-changed-1bc2d7ec-858c-45a9-8966-8c35ee7ef110 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1795.948659] env[62510]: DEBUG nova.compute.manager [req-9d2019e4-984c-4d44-8f89-cf4b01626a3e req-5e42e3dd-e9f5-4b29-b6f1-58c918210d4d service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Refreshing instance network info cache due to event network-changed-1bc2d7ec-858c-45a9-8966-8c35ee7ef110. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1795.948826] env[62510]: DEBUG oslo_concurrency.lockutils [req-9d2019e4-984c-4d44-8f89-cf4b01626a3e req-5e42e3dd-e9f5-4b29-b6f1-58c918210d4d service nova] Acquiring lock "refresh_cache-241d842d-3dd5-4ac2-a18a-12b9c9fbd340" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1795.965324] env[62510]: DEBUG oslo_concurrency.lockutils [None req-02edb596-2058-4757-92c6-7ae260418478 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.559s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1795.968956] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.910s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1795.970464] env[62510]: INFO nova.compute.claims [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1795.988375] env[62510]: INFO nova.compute.manager [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Took 29.11 seconds to build instance. [ 1796.063911] env[62510]: DEBUG oslo_vmware.api [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52c5af97-098b-8611-448f-418f58088646, 'name': SearchDatastore_Task, 'duration_secs': 0.01474} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.064260] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1796.064686] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 1e3e2044-a072-454f-85ba-5cb0bc36b5fd/1e3e2044-a072-454f-85ba-5cb0bc36b5fd.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1796.064887] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6b636c1a-78ee-4e7a-98ea-356e5488cf26 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.075319] env[62510]: DEBUG oslo_vmware.api [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Waiting for the task: (returnval){ [ 1796.075319] env[62510]: value = "task-1769220" [ 1796.075319] env[62510]: _type = "Task" [ 1796.075319] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.089022] env[62510]: DEBUG oslo_vmware.api [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769220, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.107635] env[62510]: DEBUG nova.network.neutron [None req-9720fe1f-a22c-4122-91d5-1ac32c853ec2 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Updating instance_info_cache with network_info: [{"id": "e0d0d69b-8e64-4722-b7d5-837e5c7482bc", "address": "fa:16:3e:2b:3e:7e", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0d0d69b-8e", "ovs_interfaceid": "e0d0d69b-8e64-4722-b7d5-837e5c7482bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "19ad8f3d-cc51-441e-862f-31fabe6277ae", "address": "fa:16:3e:69:fc:ef", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19ad8f3d-cc", "ovs_interfaceid": "19ad8f3d-cc51-441e-862f-31fabe6277ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1bc2d7ec-858c-45a9-8966-8c35ee7ef110", "address": "fa:16:3e:49:27:30", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", 
"details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1bc2d7ec-85", "ovs_interfaceid": "1bc2d7ec-858c-45a9-8966-8c35ee7ef110", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1796.142851] env[62510]: DEBUG oslo_vmware.api [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769218, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.193269] env[62510]: DEBUG oslo_vmware.api [None req-fab329b8-2e25-42a4-9437-e077c5825a73 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769219, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.490210] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a68477b-b765-4211-b0a3-f6911389943d tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Lock "1dc9e3b6-5e75-49b4-aef0-01200fb9be47" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.651s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1796.567743] env[62510]: INFO nova.scheduler.client.report [None req-02edb596-2058-4757-92c6-7ae260418478 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Deleted allocation for migration 25f05b49-1007-47d0-bf52-3f3d8c3c0d9b [ 1796.585557] env[62510]: DEBUG oslo_vmware.api [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769220, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.612044] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9720fe1f-a22c-4122-91d5-1ac32c853ec2 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Releasing lock "refresh_cache-241d842d-3dd5-4ac2-a18a-12b9c9fbd340" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1796.612807] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9720fe1f-a22c-4122-91d5-1ac32c853ec2 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "241d842d-3dd5-4ac2-a18a-12b9c9fbd340" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1796.613630] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9720fe1f-a22c-4122-91d5-1ac32c853ec2 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquired lock "241d842d-3dd5-4ac2-a18a-12b9c9fbd340" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1796.614360] env[62510]: DEBUG oslo_concurrency.lockutils [req-9d2019e4-984c-4d44-8f89-cf4b01626a3e req-5e42e3dd-e9f5-4b29-b6f1-58c918210d4d service nova] Acquired lock "refresh_cache-241d842d-3dd5-4ac2-a18a-12b9c9fbd340" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1796.614580] env[62510]: DEBUG nova.network.neutron [req-9d2019e4-984c-4d44-8f89-cf4b01626a3e req-5e42e3dd-e9f5-4b29-b6f1-58c918210d4d service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Refreshing network info cache for port 1bc2d7ec-858c-45a9-8966-8c35ee7ef110 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1796.616648] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76a2135e-c0b6-44ee-b321-201900bd1d4a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.636631] env[62510]: DEBUG nova.virt.hardware [None req-9720fe1f-a22c-4122-91d5-1ac32c853ec2 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1796.636924] env[62510]: DEBUG nova.virt.hardware [None req-9720fe1f-a22c-4122-91d5-1ac32c853ec2 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1796.637321] env[62510]: DEBUG nova.virt.hardware [None req-9720fe1f-a22c-4122-91d5-1ac32c853ec2 tempest-AttachInterfacesTestJSON-679866724 
tempest-AttachInterfacesTestJSON-679866724-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1796.637579] env[62510]: DEBUG nova.virt.hardware [None req-9720fe1f-a22c-4122-91d5-1ac32c853ec2 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1796.637777] env[62510]: DEBUG nova.virt.hardware [None req-9720fe1f-a22c-4122-91d5-1ac32c853ec2 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1796.637945] env[62510]: DEBUG nova.virt.hardware [None req-9720fe1f-a22c-4122-91d5-1ac32c853ec2 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1796.638193] env[62510]: DEBUG nova.virt.hardware [None req-9720fe1f-a22c-4122-91d5-1ac32c853ec2 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1796.639113] env[62510]: DEBUG nova.virt.hardware [None req-9720fe1f-a22c-4122-91d5-1ac32c853ec2 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1796.639113] env[62510]: DEBUG nova.virt.hardware [None req-9720fe1f-a22c-4122-91d5-1ac32c853ec2 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1796.639113] env[62510]: DEBUG nova.virt.hardware [None req-9720fe1f-a22c-4122-91d5-1ac32c853ec2 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1796.639113] env[62510]: DEBUG nova.virt.hardware [None req-9720fe1f-a22c-4122-91d5-1ac32c853ec2 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1796.645555] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9720fe1f-a22c-4122-91d5-1ac32c853ec2 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Reconfiguring VM to attach interface {{(pid=62510) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1796.650420] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f5f329ae-bc84-4564-a8c3-05858a3b12d4 {{(pid=62510) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.670209] env[62510]: DEBUG oslo_vmware.api [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769218, 'name': PowerOnVM_Task, 'duration_secs': 0.864176} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.671786] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 31772dc9-4f04-42df-9e3b-3200cc72c977] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1796.672021] env[62510]: INFO nova.compute.manager [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 31772dc9-4f04-42df-9e3b-3200cc72c977] Took 5.90 seconds to spawn the instance on the hypervisor. [ 1796.673221] env[62510]: DEBUG nova.compute.manager [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 31772dc9-4f04-42df-9e3b-3200cc72c977] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1796.673221] env[62510]: DEBUG oslo_vmware.api [None req-9720fe1f-a22c-4122-91d5-1ac32c853ec2 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1796.673221] env[62510]: value = "task-1769221" [ 1796.673221] env[62510]: _type = "Task" [ 1796.673221] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.673703] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e9611d3-45cf-4416-8d08-71c9ef79b23d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.689976] env[62510]: DEBUG oslo_vmware.api [None req-9720fe1f-a22c-4122-91d5-1ac32c853ec2 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769221, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.701045] env[62510]: DEBUG oslo_vmware.api [None req-fab329b8-2e25-42a4-9437-e077c5825a73 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769219, 'name': RemoveSnapshot_Task, 'duration_secs': 0.898084} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.701045] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-fab329b8-2e25-42a4-9437-e077c5825a73 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Deleted Snapshot of the VM instance {{(pid=62510) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1797.079987] env[62510]: DEBUG oslo_concurrency.lockutils [None req-02edb596-2058-4757-92c6-7ae260418478 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "77f485ae-9c4c-424e-8bac-6d023e428767" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 24.962s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1797.097328] env[62510]: DEBUG oslo_vmware.api [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769220, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.190346] env[62510]: DEBUG oslo_vmware.api [None req-9720fe1f-a22c-4122-91d5-1ac32c853ec2 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769221, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.204648] env[62510]: INFO nova.compute.manager [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 31772dc9-4f04-42df-9e3b-3200cc72c977] Took 28.29 seconds to build instance. [ 1797.207081] env[62510]: WARNING nova.compute.manager [None req-fab329b8-2e25-42a4-9437-e077c5825a73 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Image not found during snapshot: nova.exception.ImageNotFound: Image e9950cfb-6366-4124-8a28-ec0e21499f71 could not be found. 
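The entries above trace the standard oslo.vmware task lifecycle: a vCenter method ending in _Task is invoked through the API session, the caller logs 'Waiting for the task: (returnval){ value = "task-NNNNNNN" ... }' (wait_for_task, api.py:397), and the task is polled (_poll_task, api.py:434, the "progress is N%" lines) until it completes at api.py:444. The following is a minimal sketch of that calling pattern only, assuming oslo.vmware is installed and a reachable vCenter; the host, credentials, and the 'vm-12345' moref are placeholders, not values taken from this log.

from oslo_vmware import api
from oslo_vmware import vim_util

# Placeholder connection details -- not taken from this log.
session = api.VMwareAPISession(
    'vcenter.example.org',            # vCenter host (placeholder)
    'administrator@vsphere.local',    # user (placeholder)
    'secret',                         # password (placeholder)
    api_retry_count=10,
    task_poll_interval=0.5)           # sub-second polling, as in the entries above

# Build a managed object reference for a VM (placeholder moref id).
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# Start an asynchronous vCenter operation; the call returns a Task
# reference immediately rather than blocking.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

# Block until the task reaches success, raising an oslo_vmware exception
# on task error; this polling loop is what emits the "progress is N%"
# and "completed successfully" lines seen above.
session.wait_for_task(task)

The same pattern covers the SearchDatastore_Task, ReconfigVM_Task, CopyVirtualDisk_Task and RemoveSnapshot_Task entries in this section; only the invoked method and its arguments differ.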
[ 1797.410272] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c31040b-8c7f-442a-add7-3ba606e4919f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.421872] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e88bcbff-04f1-4b95-9086-0b55ad9e14d8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.468128] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9311d104-f1f7-4a85-a3bf-189c53fc68ae {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.480008] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a9b3df5-4de5-4d12-8ec3-7380375eb2de {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.502025] env[62510]: DEBUG nova.compute.provider_tree [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1797.598068] env[62510]: DEBUG oslo_vmware.api [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769220, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.44542} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1797.598579] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 1e3e2044-a072-454f-85ba-5cb0bc36b5fd/1e3e2044-a072-454f-85ba-5cb0bc36b5fd.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1797.599594] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1797.599594] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d0be2129-d83f-4b54-a481-cdaf5c1823bd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.611263] env[62510]: DEBUG oslo_vmware.api [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Waiting for the task: (returnval){ [ 1797.611263] env[62510]: value = "task-1769222" [ 1797.611263] env[62510]: _type = "Task" [ 1797.611263] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1797.625551] env[62510]: DEBUG oslo_vmware.api [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769222, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.695262] env[62510]: DEBUG oslo_vmware.api [None req-9720fe1f-a22c-4122-91d5-1ac32c853ec2 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769221, 'name': ReconfigVM_Task, 'duration_secs': 0.959531} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1797.695262] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9720fe1f-a22c-4122-91d5-1ac32c853ec2 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Releasing lock "241d842d-3dd5-4ac2-a18a-12b9c9fbd340" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1797.695262] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9720fe1f-a22c-4122-91d5-1ac32c853ec2 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Reconfigured VM to attach interface {{(pid=62510) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1797.707490] env[62510]: DEBUG oslo_concurrency.lockutils [None req-de2c86f1-bee2-4691-971c-6360906da1bb tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Lock "31772dc9-4f04-42df-9e3b-3200cc72c977" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.805s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1797.710700] env[62510]: DEBUG nova.network.neutron [req-9d2019e4-984c-4d44-8f89-cf4b01626a3e req-5e42e3dd-e9f5-4b29-b6f1-58c918210d4d service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Updated VIF entry in instance network info cache for port 1bc2d7ec-858c-45a9-8966-8c35ee7ef110. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1797.711293] env[62510]: DEBUG nova.network.neutron [req-9d2019e4-984c-4d44-8f89-cf4b01626a3e req-5e42e3dd-e9f5-4b29-b6f1-58c918210d4d service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Updating instance_info_cache with network_info: [{"id": "e0d0d69b-8e64-4722-b7d5-837e5c7482bc", "address": "fa:16:3e:2b:3e:7e", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0d0d69b-8e", "ovs_interfaceid": "e0d0d69b-8e64-4722-b7d5-837e5c7482bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "19ad8f3d-cc51-441e-862f-31fabe6277ae", "address": "fa:16:3e:69:fc:ef", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 
4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19ad8f3d-cc", "ovs_interfaceid": "19ad8f3d-cc51-441e-862f-31fabe6277ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1bc2d7ec-858c-45a9-8966-8c35ee7ef110", "address": "fa:16:3e:49:27:30", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1bc2d7ec-85", "ovs_interfaceid": "1bc2d7ec-858c-45a9-8966-8c35ee7ef110", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1797.718710] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4b6d599b-5288-467e-9b6c-9379724e7952 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1797.720886] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4b6d599b-5288-467e-9b6c-9379724e7952 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1797.720886] env[62510]: DEBUG nova.compute.manager [None req-4b6d599b-5288-467e-9b6c-9379724e7952 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1797.723298] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f78c9669-e039-4d52-9183-a87fccb6f102 {{(pid=62510) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.735101] env[62510]: DEBUG nova.compute.manager [None req-4b6d599b-5288-467e-9b6c-9379724e7952 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62510) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1797.735819] env[62510]: DEBUG nova.objects.instance [None req-4b6d599b-5288-467e-9b6c-9379724e7952 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lazy-loading 'flavor' on Instance uuid f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1798.032174] env[62510]: ERROR nova.scheduler.client.report [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [req-4cdbbb97-3eb7-4f20-b3be-b8c239bfda4f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c3653102-341b-4ed1-8b1f-1abaf8aa3e56. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-4cdbbb97-3eb7-4f20-b3be-b8c239bfda4f"}]} [ 1798.051481] env[62510]: DEBUG nova.scheduler.client.report [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Refreshing inventories for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1798.078737] env[62510]: DEBUG nova.scheduler.client.report [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Updating ProviderTree inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1798.079303] env[62510]: DEBUG nova.compute.provider_tree [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 
1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1798.093273] env[62510]: DEBUG nova.scheduler.client.report [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Refreshing aggregate associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, aggregates: None {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1798.126821] env[62510]: DEBUG oslo_vmware.api [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769222, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.153336} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1798.128661] env[62510]: DEBUG nova.scheduler.client.report [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Refreshing trait associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1798.135548] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1798.137514] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4fe3705-2952-4470-8d6d-061b4e06e119 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.173906] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] 1e3e2044-a072-454f-85ba-5cb0bc36b5fd/1e3e2044-a072-454f-85ba-5cb0bc36b5fd.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1798.179360] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b34a54e4-0f87-4e07-83d3-4b722dec1c44 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.201177] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9720fe1f-a22c-4122-91d5-1ac32c853ec2 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "interface-241d842d-3dd5-4ac2-a18a-12b9c9fbd340-1bc2d7ec-858c-45a9-8966-8c35ee7ef110" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.351s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1798.206145] env[62510]: DEBUG oslo_vmware.api [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 
tempest-ServerShowV247Test-1219505604-project-member] Waiting for the task: (returnval){ [ 1798.206145] env[62510]: value = "task-1769223" [ 1798.206145] env[62510]: _type = "Task" [ 1798.206145] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.216964] env[62510]: DEBUG oslo_vmware.api [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769223, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.219742] env[62510]: DEBUG oslo_concurrency.lockutils [req-9d2019e4-984c-4d44-8f89-cf4b01626a3e req-5e42e3dd-e9f5-4b29-b6f1-58c918210d4d service nova] Releasing lock "refresh_cache-241d842d-3dd5-4ac2-a18a-12b9c9fbd340" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1798.504962] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34d152a8-ffa4-46a5-838b-ea8b2d755228 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.516158] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46de3061-d77e-42f0-86cb-6e516fd1da26 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.553956] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ddc9e79-6a96-45c7-ab96-64c88b66d91a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.562590] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e32f0e75-8987-4964-a5e1-d0bcda26b7ed {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.580641] env[62510]: DEBUG nova.compute.provider_tree [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1798.719401] env[62510]: DEBUG oslo_vmware.api [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769223, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.745969] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b6d599b-5288-467e-9b6c-9379724e7952 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1798.746309] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-804e16fe-f8bd-436e-9db7-6b5128fb8ca7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.756195] env[62510]: DEBUG oslo_vmware.api [None req-4b6d599b-5288-467e-9b6c-9379724e7952 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 1798.756195] env[62510]: value = "task-1769224" [ 1798.756195] env[62510]: _type = "Task" [ 1798.756195] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.767050] env[62510]: DEBUG oslo_vmware.api [None req-4b6d599b-5288-467e-9b6c-9379724e7952 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769224, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.864337] env[62510]: DEBUG nova.compute.manager [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1798.865356] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53d0f474-ecd1-4944-bdac-b5c6d3f44151 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.933662] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2811fd10-e394-48ed-bdd1-d242c883a569 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Acquiring lock "9373089f-dbd4-4ac9-8736-e4c929fe6fb0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1798.936023] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2811fd10-e394-48ed-bdd1-d242c883a569 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Lock "9373089f-dbd4-4ac9-8736-e4c929fe6fb0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1798.936023] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2811fd10-e394-48ed-bdd1-d242c883a569 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Acquiring lock "9373089f-dbd4-4ac9-8736-e4c929fe6fb0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1798.936023] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2811fd10-e394-48ed-bdd1-d242c883a569 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Lock "9373089f-dbd4-4ac9-8736-e4c929fe6fb0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1798.936023] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2811fd10-e394-48ed-bdd1-d242c883a569 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Lock "9373089f-dbd4-4ac9-8736-e4c929fe6fb0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1798.937169] env[62510]: INFO nova.compute.manager [None req-2811fd10-e394-48ed-bdd1-d242c883a569 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Terminating instance [ 1799.128022] env[62510]: DEBUG nova.scheduler.client.report [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Updated inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with generation 117 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1799.128022] env[62510]: DEBUG nova.compute.provider_tree [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Updating resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 generation from 117 to 118 during operation: update_inventory {{(pid=62510) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1799.128022] env[62510]: DEBUG nova.compute.provider_tree [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1799.228205] env[62510]: DEBUG oslo_vmware.api [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769223, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.270353] env[62510]: DEBUG oslo_vmware.api [None req-4b6d599b-5288-467e-9b6c-9379724e7952 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769224, 'name': PowerOffVM_Task, 'duration_secs': 0.40405} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.272022] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b6d599b-5288-467e-9b6c-9379724e7952 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1799.272022] env[62510]: DEBUG nova.compute.manager [None req-4b6d599b-5288-467e-9b6c-9379724e7952 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1799.272661] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f001f3d-f575-41e5-a5e6-8edd4745f8b7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.383412] env[62510]: INFO nova.compute.manager [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] instance snapshotting [ 1799.386595] env[62510]: DEBUG nova.objects.instance [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lazy-loading 'flavor' on Instance uuid 841460b0-d917-44ea-88c6-0e5a3022f658 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1799.442081] env[62510]: DEBUG nova.compute.manager [None req-2811fd10-e394-48ed-bdd1-d242c883a569 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1799.442334] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2811fd10-e394-48ed-bdd1-d242c883a569 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1799.443487] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3111ee7b-07a7-49c9-8a73-e2d8b069570e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.454262] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2811fd10-e394-48ed-bdd1-d242c883a569 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1799.454593] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ef1fb393-53ba-43ec-b95f-b380ef7e1faa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.463459] env[62510]: DEBUG oslo_vmware.api [None req-2811fd10-e394-48ed-bdd1-d242c883a569 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for the task: (returnval){ [ 1799.463459] env[62510]: value = "task-1769225" [ 1799.463459] env[62510]: _type = "Task" [ 1799.463459] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.481838] env[62510]: DEBUG oslo_vmware.api [None req-2811fd10-e394-48ed-bdd1-d242c883a569 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769225, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.635495] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.666s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1799.636356] env[62510]: DEBUG nova.compute.manager [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1799.642552] env[62510]: DEBUG oslo_concurrency.lockutils [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.702s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1799.643106] env[62510]: DEBUG nova.objects.instance [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lazy-loading 'resources' on Instance uuid 16b5d928-94fe-4fd5-9909-775c28d7edd2 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1799.719883] env[62510]: DEBUG oslo_vmware.api [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769223, 'name': ReconfigVM_Task, 'duration_secs': 1.370856} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.721419] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Reconfigured VM instance instance-00000058 to attach disk [datastore1] 1e3e2044-a072-454f-85ba-5cb0bc36b5fd/1e3e2044-a072-454f-85ba-5cb0bc36b5fd.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1799.722329] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6c12d933-0d3a-42f1-bb8e-f2bbb7cd48fc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.730805] env[62510]: DEBUG oslo_vmware.api [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Waiting for the task: (returnval){ [ 1799.730805] env[62510]: value = "task-1769226" [ 1799.730805] env[62510]: _type = "Task" [ 1799.730805] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.746957] env[62510]: DEBUG oslo_vmware.api [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769226, 'name': Rename_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.790735] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4b6d599b-5288-467e-9b6c-9379724e7952 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.072s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1799.893199] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-932d1de8-7528-4d1d-bd7f-ccf49997420f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.922156] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59e1c55a-3b1c-4f71-9b74-9043900c2623 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.990677] env[62510]: DEBUG oslo_vmware.api [None req-2811fd10-e394-48ed-bdd1-d242c883a569 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769225, 'name': PowerOffVM_Task, 'duration_secs': 0.479401} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.990972] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2811fd10-e394-48ed-bdd1-d242c883a569 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1799.991161] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2811fd10-e394-48ed-bdd1-d242c883a569 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1799.991724] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4bc69ceb-b554-4e15-a633-ae99e4854297 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.106643] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2811fd10-e394-48ed-bdd1-d242c883a569 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1800.106643] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2811fd10-e394-48ed-bdd1-d242c883a569 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1800.106765] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-2811fd10-e394-48ed-bdd1-d242c883a569 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] 
Deleting the datastore file [datastore1] 9373089f-dbd4-4ac9-8736-e4c929fe6fb0 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1800.107687] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ba5c83a0-e5b4-446b-b35b-0c05411a6a96 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.115755] env[62510]: DEBUG oslo_vmware.api [None req-2811fd10-e394-48ed-bdd1-d242c883a569 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for the task: (returnval){ [ 1800.115755] env[62510]: value = "task-1769228" [ 1800.115755] env[62510]: _type = "Task" [ 1800.115755] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.126471] env[62510]: DEBUG oslo_vmware.api [None req-2811fd10-e394-48ed-bdd1-d242c883a569 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769228, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.147100] env[62510]: DEBUG nova.compute.utils [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1800.154391] env[62510]: DEBUG nova.compute.manager [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1800.154693] env[62510]: DEBUG nova.network.neutron [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1800.217113] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bdf94947-4a89-4af4-a764-8935c0f78cca tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Acquiring lock "f40078f0-af6b-480b-96e6-4117022c87e2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1800.217445] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bdf94947-4a89-4af4-a764-8935c0f78cca tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Lock "f40078f0-af6b-480b-96e6-4117022c87e2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1800.217691] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bdf94947-4a89-4af4-a764-8935c0f78cca tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Acquiring lock "f40078f0-af6b-480b-96e6-4117022c87e2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1800.217927] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bdf94947-4a89-4af4-a764-8935c0f78cca tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Lock "f40078f0-af6b-480b-96e6-4117022c87e2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1800.218097] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bdf94947-4a89-4af4-a764-8935c0f78cca tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Lock "f40078f0-af6b-480b-96e6-4117022c87e2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1800.220268] env[62510]: INFO nova.compute.manager [None req-bdf94947-4a89-4af4-a764-8935c0f78cca tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Terminating instance [ 1800.247896] env[62510]: DEBUG oslo_vmware.api [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769226, 'name': Rename_Task, 'duration_secs': 0.207532} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1800.248257] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1800.249018] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-994c0f9f-0b6b-4326-9a32-9d194cbd5f21 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.262256] env[62510]: DEBUG oslo_vmware.api [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Waiting for the task: (returnval){ [ 1800.262256] env[62510]: value = "task-1769229" [ 1800.262256] env[62510]: _type = "Task" [ 1800.262256] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.280724] env[62510]: DEBUG oslo_vmware.api [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769229, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.290593] env[62510]: DEBUG nova.policy [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e0b465ab9caf4d989219f1fbbebd00ce', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd98518565b744451ba90ba301267213f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1800.433277] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Creating Snapshot of the VM instance {{(pid=62510) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1800.434017] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-1c85af38-0ef1-47a0-a475-3838cb2b3764 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.446250] env[62510]: DEBUG oslo_vmware.api [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 1800.446250] env[62510]: value = "task-1769230" [ 1800.446250] env[62510]: _type = "Task" [ 1800.446250] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.457598] env[62510]: DEBUG oslo_vmware.api [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769230, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.483491] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d012cbfe-4aeb-4b3a-9ae0-e5c80ec2d32c tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Acquiring lock "1dc9e3b6-5e75-49b4-aef0-01200fb9be47" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1800.484026] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d012cbfe-4aeb-4b3a-9ae0-e5c80ec2d32c tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Lock "1dc9e3b6-5e75-49b4-aef0-01200fb9be47" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1800.484666] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d012cbfe-4aeb-4b3a-9ae0-e5c80ec2d32c tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Acquiring lock "1dc9e3b6-5e75-49b4-aef0-01200fb9be47-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1800.485159] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d012cbfe-4aeb-4b3a-9ae0-e5c80ec2d32c tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Lock "1dc9e3b6-5e75-49b4-aef0-01200fb9be47-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1800.485296] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d012cbfe-4aeb-4b3a-9ae0-e5c80ec2d32c tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Lock "1dc9e3b6-5e75-49b4-aef0-01200fb9be47-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1800.490724] env[62510]: INFO nova.compute.manager [None req-d012cbfe-4aeb-4b3a-9ae0-e5c80ec2d32c tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Terminating instance [ 1800.566475] env[62510]: DEBUG oslo_concurrency.lockutils [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquiring lock "cf4160a8-1160-45fc-b9e5-e9526b6c1506" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1800.566775] env[62510]: DEBUG oslo_concurrency.lockutils [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lock "cf4160a8-1160-45fc-b9e5-e9526b6c1506" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1800.605149] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0b879cd-fd71-4c92-9f5b-9b09c224f2ce {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.616106] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10ee5e1e-952c-4d2d-bba2-fb7bcd79ea33 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.665279] env[62510]: DEBUG oslo_vmware.api [None req-2811fd10-e394-48ed-bdd1-d242c883a569 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769228, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.227309} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1800.666710] env[62510]: DEBUG nova.compute.manager [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1800.670629] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-2811fd10-e394-48ed-bdd1-d242c883a569 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1800.670861] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2811fd10-e394-48ed-bdd1-d242c883a569 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1800.671123] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2811fd10-e394-48ed-bdd1-d242c883a569 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1800.671361] env[62510]: INFO nova.compute.manager [None req-2811fd10-e394-48ed-bdd1-d242c883a569 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Took 1.23 seconds to destroy the instance on the hypervisor. 
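The surrounding records show the usual oslo.vmware pattern for asynchronous vCenter work: a *_Task method is invoked (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task above), then the session polls it, producing the "Waiting for the task: (returnval){ ... } to complete" and "progress is N% ... completed successfully" lines. The following is only an illustrative sketch of that pattern, not code taken from this deployment; the vCenter host, credentials, poll interval and the VM managed-object reference are placeholders/assumptions.

from oslo_vmware import api

# Placeholder connection details; VMwareAPISession logs into vCenter on creation.
session = api.VMwareAPISession(
    'vc.example.test',               # assumed vCenter endpoint
    'user@vsphere.local', 'secret',  # assumed credentials
    api_retry_count=10,
    task_poll_interval=0.5)          # poll cadence comparable to the records above

vm_ref = ...  # placeholder: a VirtualMachine managed-object reference

# Start an asynchronous task (a power-off, as in task-1769224/1769225 above)
# and block until vCenter reports completion. wait_for_task() is what emits
# the periodic "_poll_task ... progress is N%" records seen in this log.
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
session.wait_for_task(task)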
[ 1800.671619] env[62510]: DEBUG oslo.service.loopingcall [None req-2811fd10-e394-48ed-bdd1-d242c883a569 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1800.672406] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5b6491e-fbf8-4fd3-97c5-d316dbc8da86 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.675748] env[62510]: DEBUG nova.compute.manager [-] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1800.675864] env[62510]: DEBUG nova.network.neutron [-] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1800.684903] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74567636-4e4b-4b15-8d80-5b76aeba4c37 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.705031] env[62510]: DEBUG nova.compute.provider_tree [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1800.727733] env[62510]: DEBUG nova.compute.manager [None req-bdf94947-4a89-4af4-a764-8935c0f78cca tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1800.728175] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-bdf94947-4a89-4af4-a764-8935c0f78cca tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1800.729254] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e08a0479-b45b-44ed-872a-4fb5ff13a412 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.738887] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdf94947-4a89-4af4-a764-8935c0f78cca tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1800.739061] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ac601834-bdb0-4657-8aab-797a66ec3c34 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.747494] env[62510]: DEBUG oslo_vmware.api [None req-bdf94947-4a89-4af4-a764-8935c0f78cca tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Waiting for the task: (returnval){ [ 1800.747494] env[62510]: value = "task-1769231" [ 1800.747494] env[62510]: _type = "Task" [ 1800.747494] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.756956] env[62510]: DEBUG oslo_vmware.api [None req-bdf94947-4a89-4af4-a764-8935c0f78cca tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': task-1769231, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.777992] env[62510]: DEBUG oslo_vmware.api [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769229, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.962681] env[62510]: DEBUG oslo_vmware.api [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769230, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.995797] env[62510]: DEBUG nova.compute.manager [None req-d012cbfe-4aeb-4b3a-9ae0-e5c80ec2d32c tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1800.995797] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d012cbfe-4aeb-4b3a-9ae0-e5c80ec2d32c tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1800.996294] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcf1d5f0-b3de-4356-b443-4017d224c64f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.005854] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d012cbfe-4aeb-4b3a-9ae0-e5c80ec2d32c tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1801.006308] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3904b2b3-13c2-4168-a0f2-048ffc88bd13 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.015240] env[62510]: DEBUG oslo_vmware.api [None req-d012cbfe-4aeb-4b3a-9ae0-e5c80ec2d32c tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Waiting for the task: (returnval){ [ 1801.015240] env[62510]: value = "task-1769232" [ 1801.015240] env[62510]: _type = "Task" [ 1801.015240] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.025507] env[62510]: DEBUG oslo_vmware.api [None req-d012cbfe-4aeb-4b3a-9ae0-e5c80ec2d32c tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Task: {'id': task-1769232, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.065963] env[62510]: DEBUG nova.network.neutron [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Successfully created port: ca1200b2-6f64-4952-a587-f2fdb0fc14d1 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1801.069836] env[62510]: DEBUG nova.compute.manager [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1801.237589] env[62510]: ERROR nova.scheduler.client.report [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [req-9e0cee6f-b97f-461e-81c4-401e4160597e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c3653102-341b-4ed1-8b1f-1abaf8aa3e56. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-9e0cee6f-b97f-461e-81c4-401e4160597e"}]} [ 1801.260931] env[62510]: DEBUG oslo_vmware.api [None req-bdf94947-4a89-4af4-a764-8935c0f78cca tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': task-1769231, 'name': PowerOffVM_Task, 'duration_secs': 0.376137} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.260931] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdf94947-4a89-4af4-a764-8935c0f78cca tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1801.260931] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-bdf94947-4a89-4af4-a764-8935c0f78cca tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1801.260931] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-150c328f-e1a0-4a73-96c1-b77a7b5014c1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.263787] env[62510]: DEBUG nova.scheduler.client.report [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Refreshing inventories for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1801.283823] env[62510]: DEBUG oslo_vmware.api [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769229, 'name': PowerOnVM_Task, 'duration_secs': 0.556834} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.284977] env[62510]: DEBUG nova.scheduler.client.report [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Updating ProviderTree inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1801.285311] env[62510]: DEBUG nova.compute.provider_tree [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1801.288588] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1801.288850] env[62510]: INFO nova.compute.manager [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Took 7.94 seconds to spawn the instance on the hypervisor. 
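The ERROR records at [ 1798.032174] and [ 1801.237589] are Placement rejecting an inventory PUT with 409 "placement.concurrent_update" because the report client's cached resource-provider generation was stale; the client then refreshes inventories, aggregates and traits and retries, and the generation advances (117 to 118 earlier in this section). A minimal sketch of that read-refresh-retry loop against the Placement REST API follows; the endpoint, token and microversion header are placeholders/assumptions, and only the generation-based conflict handling is the point.

import requests

PLACEMENT = 'http://placement.example.test'           # placeholder endpoint
HEADERS = {'X-Auth-Token': 'TOKEN',                    # placeholder token
           'OpenStack-API-Version': 'placement 1.26'}  # assumed microversion

def set_inventory(rp_uuid, inventories, attempts=3):
    url = f'{PLACEMENT}/resource_providers/{rp_uuid}/inventories'
    for _ in range(attempts):
        # Read the current inventory to learn the provider generation.
        current = requests.get(url, headers=HEADERS).json()
        payload = {
            'resource_provider_generation':
                current['resource_provider_generation'],
            'inventories': inventories,
        }
        resp = requests.put(url, json=payload, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # 409 placement.concurrent_update: another writer bumped the
        # generation, as in the records above; refresh and retry.
    raise RuntimeError('inventory update still conflicting after retries')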
[ 1801.289068] env[62510]: DEBUG nova.compute.manager [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1801.290567] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd81cf99-2d9a-4cb1-9725-adfa3df412ca {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.304703] env[62510]: DEBUG nova.scheduler.client.report [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Refreshing aggregate associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, aggregates: None {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1801.340455] env[62510]: DEBUG nova.scheduler.client.report [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Refreshing trait associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1801.359178] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-bdf94947-4a89-4af4-a764-8935c0f78cca tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1801.359858] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-bdf94947-4a89-4af4-a764-8935c0f78cca tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1801.359858] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdf94947-4a89-4af4-a764-8935c0f78cca tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Deleting the datastore file [datastore1] f40078f0-af6b-480b-96e6-4117022c87e2 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1801.361219] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4c380586-59ce-428c-81df-b812236aed84 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.369529] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8189f79b-a4ba-46f9-9194-d393d9d2c4e3 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "77f485ae-9c4c-424e-8bac-6d023e428767" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1801.369830] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8189f79b-a4ba-46f9-9194-d393d9d2c4e3 tempest-ServerDiskConfigTestJSON-921990528 
tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "77f485ae-9c4c-424e-8bac-6d023e428767" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1801.370186] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8189f79b-a4ba-46f9-9194-d393d9d2c4e3 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "77f485ae-9c4c-424e-8bac-6d023e428767-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1801.370519] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8189f79b-a4ba-46f9-9194-d393d9d2c4e3 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "77f485ae-9c4c-424e-8bac-6d023e428767-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1801.370852] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8189f79b-a4ba-46f9-9194-d393d9d2c4e3 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "77f485ae-9c4c-424e-8bac-6d023e428767-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1801.376815] env[62510]: DEBUG oslo_vmware.api [None req-bdf94947-4a89-4af4-a764-8935c0f78cca tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Waiting for the task: (returnval){ [ 1801.376815] env[62510]: value = "task-1769234" [ 1801.376815] env[62510]: _type = "Task" [ 1801.376815] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.377436] env[62510]: INFO nova.compute.manager [None req-8189f79b-a4ba-46f9-9194-d393d9d2c4e3 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Terminating instance [ 1801.394812] env[62510]: DEBUG oslo_vmware.api [None req-bdf94947-4a89-4af4-a764-8935c0f78cca tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': task-1769234, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.453680] env[62510]: DEBUG nova.objects.instance [None req-3ca2393a-be10-43c7-b771-f3e6630e3b6a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lazy-loading 'flavor' on Instance uuid f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1801.460825] env[62510]: DEBUG oslo_vmware.api [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769230, 'name': CreateSnapshot_Task, 'duration_secs': 1.002344} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.461459] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Created Snapshot of the VM instance {{(pid=62510) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1801.462388] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccbad53a-df64-4029-affa-fca2c9e54e31 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.528549] env[62510]: DEBUG oslo_vmware.api [None req-d012cbfe-4aeb-4b3a-9ae0-e5c80ec2d32c tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Task: {'id': task-1769232, 'name': PowerOffVM_Task, 'duration_secs': 0.287215} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.531373] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d012cbfe-4aeb-4b3a-9ae0-e5c80ec2d32c tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1801.531602] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d012cbfe-4aeb-4b3a-9ae0-e5c80ec2d32c tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1801.533441] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b7a1b71c-ef69-49c5-9299-759e1ba2e6e0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.609259] env[62510]: DEBUG oslo_concurrency.lockutils [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1801.679686] env[62510]: DEBUG nova.compute.manager [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1801.707346] env[62510]: DEBUG nova.compute.manager [req-fc25c465-75a1-49d3-988b-833266f6386b req-4ef35a77-6284-49a0-96fe-214475020c56 service nova] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Received event network-vif-deleted-c380698b-8ec4-4110-a0c8-89549fc49f68 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1801.707552] env[62510]: INFO nova.compute.manager [req-fc25c465-75a1-49d3-988b-833266f6386b req-4ef35a77-6284-49a0-96fe-214475020c56 service nova] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Neutron deleted interface c380698b-8ec4-4110-a0c8-89549fc49f68; detaching it from the instance and deleting it from the info cache [ 1801.707776] env[62510]: DEBUG nova.network.neutron [req-fc25c465-75a1-49d3-988b-833266f6386b req-4ef35a77-6284-49a0-96fe-214475020c56 service nova] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1801.737276] env[62510]: DEBUG nova.virt.hardware [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1801.738320] env[62510]: DEBUG nova.virt.hardware [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1801.738320] env[62510]: DEBUG nova.virt.hardware [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1801.738320] env[62510]: DEBUG nova.virt.hardware [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1801.738320] env[62510]: DEBUG nova.virt.hardware [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1801.738320] env[62510]: DEBUG nova.virt.hardware [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Chose sockets=0, cores=0, 
threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1801.738640] env[62510]: DEBUG nova.virt.hardware [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1801.738640] env[62510]: DEBUG nova.virt.hardware [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1801.738742] env[62510]: DEBUG nova.virt.hardware [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1801.738913] env[62510]: DEBUG nova.virt.hardware [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1801.739105] env[62510]: DEBUG nova.virt.hardware [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1801.740524] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47bd33f6-0310-4cfa-8887-0d6048b35aa8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.748125] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d012cbfe-4aeb-4b3a-9ae0-e5c80ec2d32c tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1801.748397] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d012cbfe-4aeb-4b3a-9ae0-e5c80ec2d32c tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1801.748587] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-d012cbfe-4aeb-4b3a-9ae0-e5c80ec2d32c tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Deleting the datastore file [datastore1] 1dc9e3b6-5e75-49b4-aef0-01200fb9be47 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1801.749601] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7257318e-6d99-4f85-b25c-61d1e74c5964 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1801.755918] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-986bc1f4-d46c-4ecb-b7ce-0bff5f640af0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.762637] env[62510]: DEBUG oslo_vmware.api [None req-d012cbfe-4aeb-4b3a-9ae0-e5c80ec2d32c tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Waiting for the task: (returnval){ [ 1801.762637] env[62510]: value = "task-1769236" [ 1801.762637] env[62510]: _type = "Task" [ 1801.762637] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.782792] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b781db10-2581-40c9-9ad1-9e70bbdb1fce {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.788971] env[62510]: DEBUG oslo_vmware.api [None req-d012cbfe-4aeb-4b3a-9ae0-e5c80ec2d32c tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Task: {'id': task-1769236, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.795049] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ede7fa47-4b4a-475d-a909-1e43958a7da7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.833537] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5da58c4a-8888-4367-b303-8384c1446583 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.840860] env[62510]: INFO nova.compute.manager [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Took 32.64 seconds to build instance. 
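The lock trace earlier in this span ("Acquiring lock ... by ... do_terminate_instance", "acquired ... waited 0.001s", then the short-lived "-events" lock being acquired and "released" with its held time) comes from oslo.concurrency's lockutils. A minimal sketch of the same pattern follows; the function bodies are illustrative stand-ins, not the actual ComputeManager code.

```python
# Hedged sketch of the per-instance lock pattern traced above: a long-lived
# lock named after the instance UUID serializes terminate, and a short
# "<uuid>-events" lock guards the queued events. Illustrative only.
from oslo_concurrency import lockutils

def do_terminate_instance(instance_uuid):
    # Emits the "Acquiring lock"/"acquired"/"released" DEBUG lines with
    # waited/held times, as seen in the records above.
    with lockutils.lock(instance_uuid):
        clear_events_for_instance(instance_uuid)
        # ... power off, unregister, delete datastore files ...

def clear_events_for_instance(instance_uuid):
    with lockutils.lock(f"{instance_uuid}-events"):
        pass  # drop any queued external events for this instance
```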
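The nova.virt.hardware records above walk the CPU topology selection for the m1.nano flavor: flavor and image limits of 0:0:0 fall back to maxima of 65536 per dimension, and for 1 vCPU the only topology whose sockets x cores x threads product matches is 1:1:1. The sketch below reproduces that enumeration under simplifying assumptions (it only factors the vCPU count against per-dimension maxima); it is a stand-in, not nova.virt.hardware itself.

```python
# Hedged sketch: list topologies with sockets*cores*threads == vcpus that
# respect per-dimension maxima, mirroring "Build topologies for 1 vcpu(s)
# 1:1:1 ... Got 1 possible topologies" above. Not nova.virt.hardware.
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    topos = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                topos.append(VirtCPUTopology(sockets, cores, threads))
    return topos

print(possible_topologies(1))
# [VirtCPUTopology(sockets=1, cores=1, threads=1)]
```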
[ 1801.848046] env[62510]: DEBUG nova.network.neutron [-] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1801.848715] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ee5adf0-29e3-4062-9530-2204c082b4dc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.869035] env[62510]: DEBUG nova.compute.provider_tree [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1801.890880] env[62510]: DEBUG nova.compute.manager [None req-8189f79b-a4ba-46f9-9194-d393d9d2c4e3 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1801.891107] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8189f79b-a4ba-46f9-9194-d393d9d2c4e3 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1801.891423] env[62510]: DEBUG oslo_vmware.api [None req-bdf94947-4a89-4af4-a764-8935c0f78cca tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Task: {'id': task-1769234, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.460781} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.892610] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aaefca3-e1bf-4043-9ded-867cf7788bbe {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.895021] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdf94947-4a89-4af4-a764-8935c0f78cca tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1801.895219] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-bdf94947-4a89-4af4-a764-8935c0f78cca tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1801.895397] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-bdf94947-4a89-4af4-a764-8935c0f78cca tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1801.895569] env[62510]: INFO nova.compute.manager [None req-bdf94947-4a89-4af4-a764-8935c0f78cca tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1801.895811] env[62510]: DEBUG oslo.service.loopingcall [None req-bdf94947-4a89-4af4-a764-8935c0f78cca tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1801.896386] env[62510]: DEBUG nova.compute.manager [-] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1801.896447] env[62510]: DEBUG nova.network.neutron [-] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1801.907059] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-8189f79b-a4ba-46f9-9194-d393d9d2c4e3 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1801.907059] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ececd235-070c-4d5c-af17-416d6ae5f762 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.912938] env[62510]: DEBUG oslo_vmware.api [None req-8189f79b-a4ba-46f9-9194-d393d9d2c4e3 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1801.912938] env[62510]: value = "task-1769237" [ 1801.912938] env[62510]: _type = "Task" [ 1801.912938] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.923337] env[62510]: DEBUG oslo_vmware.api [None req-8189f79b-a4ba-46f9-9194-d393d9d2c4e3 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769237, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.961417] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3ca2393a-be10-43c7-b771-f3e6630e3b6a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "refresh_cache-f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1801.961417] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3ca2393a-be10-43c7-b771-f3e6630e3b6a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquired lock "refresh_cache-f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1801.961667] env[62510]: DEBUG nova.network.neutron [None req-3ca2393a-be10-43c7-b771-f3e6630e3b6a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1801.961714] env[62510]: DEBUG nova.objects.instance [None req-3ca2393a-be10-43c7-b771-f3e6630e3b6a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lazy-loading 'info_cache' on Instance uuid f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1801.996777] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Creating linked-clone VM from snapshot {{(pid=62510) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1802.000790] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-6cd8e9bb-f4b7-4ec1-8098-42d82a42f4b7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.017951] env[62510]: DEBUG oslo_vmware.api [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 1802.017951] env[62510]: value = "task-1769238" [ 1802.017951] env[62510]: _type = "Task" [ 1802.017951] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1802.029637] env[62510]: DEBUG oslo_vmware.api [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769238, 'name': CloneVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.059138] env[62510]: DEBUG oslo_vmware.rw_handles [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52eccf11-0b55-45ac-3daa-a5a0bd7b61c3/disk-0.vmdk. 
{{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1802.059513] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac1d04a5-f8a7-42bc-abb9-2c5e98ab068b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.067068] env[62510]: DEBUG oslo_vmware.rw_handles [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52eccf11-0b55-45ac-3daa-a5a0bd7b61c3/disk-0.vmdk is in state: ready. {{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1802.067482] env[62510]: ERROR oslo_vmware.rw_handles [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52eccf11-0b55-45ac-3daa-a5a0bd7b61c3/disk-0.vmdk due to incomplete transfer. [ 1802.067852] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-1bf4419b-6a42-493f-bc2d-70e4dd7e59ea {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.079698] env[62510]: DEBUG oslo_vmware.rw_handles [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52eccf11-0b55-45ac-3daa-a5a0bd7b61c3/disk-0.vmdk. {{(pid=62510) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1802.079698] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Uploaded image b835e517-0008-401a-949d-3c37f4e75c37 to the Glance image server {{(pid=62510) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1802.081187] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Destroying the VM {{(pid=62510) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1802.082370] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-470cd63a-8acf-4d39-ba0e-2d3b19006f86 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.091745] env[62510]: DEBUG oslo_vmware.api [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Waiting for the task: (returnval){ [ 1802.091745] env[62510]: value = "task-1769239" [ 1802.091745] env[62510]: _type = "Task" [ 1802.091745] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1802.096979] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e9008972-1e68-4243-a5b9-7d86a9f52971 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "interface-241d842d-3dd5-4ac2-a18a-12b9c9fbd340-19ad8f3d-cc51-441e-862f-31fabe6277ae" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1802.096979] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e9008972-1e68-4243-a5b9-7d86a9f52971 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "interface-241d842d-3dd5-4ac2-a18a-12b9c9fbd340-19ad8f3d-cc51-441e-862f-31fabe6277ae" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1802.102126] env[62510]: DEBUG oslo_vmware.api [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769239, 'name': Destroy_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.212134] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0458966b-6c34-4852-8d66-f8b69a0a3e97 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.224636] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b093424f-f3b4-4967-9573-91edf12ddef8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.272070] env[62510]: DEBUG nova.compute.manager [req-fc25c465-75a1-49d3-988b-833266f6386b req-4ef35a77-6284-49a0-96fe-214475020c56 service nova] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Detach interface failed, port_id=c380698b-8ec4-4110-a0c8-89549fc49f68, reason: Instance 9373089f-dbd4-4ac9-8736-e4c929fe6fb0 could not be found. {{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1802.284086] env[62510]: DEBUG oslo_vmware.api [None req-d012cbfe-4aeb-4b3a-9ae0-e5c80ec2d32c tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Task: {'id': task-1769236, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.428049} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1802.285272] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-d012cbfe-4aeb-4b3a-9ae0-e5c80ec2d32c tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1802.285914] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d012cbfe-4aeb-4b3a-9ae0-e5c80ec2d32c tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1802.286229] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d012cbfe-4aeb-4b3a-9ae0-e5c80ec2d32c tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1802.286498] env[62510]: INFO nova.compute.manager [None req-d012cbfe-4aeb-4b3a-9ae0-e5c80ec2d32c tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Took 1.29 seconds to destroy the instance on the hypervisor. [ 1802.287427] env[62510]: DEBUG oslo.service.loopingcall [None req-d012cbfe-4aeb-4b3a-9ae0-e5c80ec2d32c tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1802.288994] env[62510]: DEBUG nova.compute.manager [-] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1802.289120] env[62510]: DEBUG nova.network.neutron [-] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1802.343283] env[62510]: INFO nova.compute.manager [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Rescuing [ 1802.343644] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquiring lock "refresh_cache-2f7b02e8-f658-448f-b6e6-9bfa94c74da4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1802.343842] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquired lock "refresh_cache-2f7b02e8-f658-448f-b6e6-9bfa94c74da4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1802.344117] env[62510]: DEBUG nova.network.neutron [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1802.345656] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1c0432f4-282d-462b-9cbd-52d09beab02d tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Lock "1e3e2044-a072-454f-85ba-5cb0bc36b5fd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.155s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1802.355405] env[62510]: INFO nova.compute.manager [-] [instance: 9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Took 1.68 seconds to deallocate network for instance. [ 1802.389351] env[62510]: ERROR nova.scheduler.client.report [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [req-68cb19fc-4e9b-4822-8908-de4c1a5a98b0] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c3653102-341b-4ed1-8b1f-1abaf8aa3e56. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-68cb19fc-4e9b-4822-8908-de4c1a5a98b0"}]} [ 1802.408076] env[62510]: DEBUG nova.scheduler.client.report [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Refreshing inventories for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1802.424114] env[62510]: DEBUG oslo_vmware.api [None req-8189f79b-a4ba-46f9-9194-d393d9d2c4e3 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769237, 'name': PowerOffVM_Task, 'duration_secs': 0.392346} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1802.425086] env[62510]: DEBUG nova.scheduler.client.report [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Updating ProviderTree inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1802.425306] env[62510]: DEBUG nova.compute.provider_tree [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1802.427977] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-8189f79b-a4ba-46f9-9194-d393d9d2c4e3 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1802.427977] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8189f79b-a4ba-46f9-9194-d393d9d2c4e3 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1802.428222] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bbceb6f3-80ca-44d8-917f-e011dada908b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.442017] env[62510]: DEBUG 
nova.scheduler.client.report [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Refreshing aggregate associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, aggregates: None {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1802.454023] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "ebd2dc4b-8d74-47db-861e-870d41a4150b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1802.454298] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "ebd2dc4b-8d74-47db-861e-870d41a4150b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1802.459191] env[62510]: DEBUG nova.scheduler.client.report [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Refreshing trait associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1802.465277] env[62510]: DEBUG nova.objects.base [None req-3ca2393a-be10-43c7-b771-f3e6630e3b6a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=62510) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1802.540354] env[62510]: DEBUG oslo_vmware.api [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769238, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.546804] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8189f79b-a4ba-46f9-9194-d393d9d2c4e3 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1802.547202] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8189f79b-a4ba-46f9-9194-d393d9d2c4e3 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1802.547742] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-8189f79b-a4ba-46f9-9194-d393d9d2c4e3 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Deleting the datastore file [datastore1] 77f485ae-9c4c-424e-8bac-6d023e428767 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1802.547809] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dd875b82-8ae3-475c-837b-d0062e7959f4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.556630] env[62510]: DEBUG oslo_vmware.api [None req-8189f79b-a4ba-46f9-9194-d393d9d2c4e3 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1802.556630] env[62510]: value = "task-1769241" [ 1802.556630] env[62510]: _type = "Task" [ 1802.556630] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1802.570753] env[62510]: DEBUG oslo_vmware.api [None req-8189f79b-a4ba-46f9-9194-d393d9d2c4e3 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769241, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.604880] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e9008972-1e68-4243-a5b9-7d86a9f52971 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "241d842d-3dd5-4ac2-a18a-12b9c9fbd340" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1802.604880] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e9008972-1e68-4243-a5b9-7d86a9f52971 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquired lock "241d842d-3dd5-4ac2-a18a-12b9c9fbd340" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1802.605149] env[62510]: DEBUG oslo_vmware.api [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769239, 'name': Destroy_Task, 'duration_secs': 0.496439} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1802.612114] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efb01907-1baa-4898-a449-57d8f80772aa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.615817] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Destroyed the VM [ 1802.616114] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Deleting Snapshot of the VM instance {{(pid=62510) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1802.620189] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-111106db-50f5-48b5-89da-9885b5785524 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.646964] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8add1905-75b5-4c08-bccb-abb3deaef610 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.650173] env[62510]: DEBUG oslo_vmware.api [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Waiting for the task: (returnval){ [ 1802.650173] env[62510]: value = "task-1769242" [ 1802.650173] env[62510]: _type = "Task" [ 1802.650173] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1802.680831] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e9008972-1e68-4243-a5b9-7d86a9f52971 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Reconfiguring VM to detach interface {{(pid=62510) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1802.685017] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4a097588-c88f-4f3e-bb09-12c568cd89d3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.706638] env[62510]: DEBUG oslo_vmware.api [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769242, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.714053] env[62510]: DEBUG oslo_vmware.api [None req-e9008972-1e68-4243-a5b9-7d86a9f52971 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1802.714053] env[62510]: value = "task-1769243" [ 1802.714053] env[62510]: _type = "Task" [ 1802.714053] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1802.727621] env[62510]: DEBUG oslo_vmware.api [None req-e9008972-1e68-4243-a5b9-7d86a9f52971 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769243, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.786075] env[62510]: DEBUG nova.network.neutron [-] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1802.797217] env[62510]: INFO nova.compute.manager [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Rebuilding instance [ 1802.844739] env[62510]: DEBUG nova.compute.manager [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1802.845678] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0603330-77bb-41ce-af24-6f8df1bf7789 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.865365] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2811fd10-e394-48ed-bdd1-d242c883a569 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1802.907321] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-392342bc-41d7-4aef-820f-4bb0b2ea7309 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.916651] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f5d7294-efc3-4271-8852-73c88f5df6aa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.950367] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb2ad4a8-3004-4a0d-9ac5-7306208f3e79 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.958380] env[62510]: DEBUG nova.compute.manager [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1802.961994] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45215aa8-74d0-4a6e-a0ab-fe3174b0847a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.977691] env[62510]: DEBUG nova.compute.provider_tree [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1803.035321] env[62510]: DEBUG oslo_vmware.api [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769238, 'name': CloneVM_Task} progress is 94%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.071237] env[62510]: DEBUG oslo_vmware.api [None req-8189f79b-a4ba-46f9-9194-d393d9d2c4e3 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769241, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.280241} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1803.071591] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-8189f79b-a4ba-46f9-9194-d393d9d2c4e3 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1803.071789] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8189f79b-a4ba-46f9-9194-d393d9d2c4e3 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1803.071966] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8189f79b-a4ba-46f9-9194-d393d9d2c4e3 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1803.072377] env[62510]: INFO nova.compute.manager [None req-8189f79b-a4ba-46f9-9194-d393d9d2c4e3 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Took 1.18 seconds to destroy the instance on the hypervisor. 
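Several records in this span wait on the `_deallocate_network_with_retries` helper inside `_try_deallocate_network` via oslo.service's looping call before the "Deallocating network for instance" / `deallocate_for_instance()` steps run. The sketch below shows that general pattern of driving a retried operation with `FixedIntervalLoopingCall`; the `deallocate_network` callable and retry budget are assumptions, not Nova's actual method.

```python
# Hedged sketch of the "Waiting for function ... to return" pattern:
# attempt a network deallocation on a fixed interval until it succeeds
# or the retry budget runs out. deallocate_network is an assumed helper.
from oslo_service import loopingcall

def deallocate_with_retries(deallocate_network, max_attempts=3, interval=1):
    attempts = {"count": 0}

    def _attempt():
        attempts["count"] += 1
        try:
            deallocate_network()
        except Exception:
            if attempts["count"] >= max_attempts:
                raise            # propagated to the .wait() below
            return               # try again on the next tick
        raise loopingcall.LoopingCallDone()  # success: stop looping

    timer = loopingcall.FixedIntervalLoopingCall(_attempt)
    timer.start(interval=interval).wait()
```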
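Throughout this span the oslo.vmware API wrapper turns vCenter tasks into blocking calls by polling their state: CloneVM_Task sits at 94%, RemoveSnapshot_Task at 0%, and DeleteDatastoreFile_Task completes with a recorded duration. A generic sketch of that poll loop follows; `get_task_info` is an assumed callable, and this is not oslo.vmware's real `wait_for_task`, which runs inside a looping call and translates vSphere faults.

```python
# Hedged sketch of the task-polling pattern behind the
# "Task: {'id': task-..., ...} progress is N%" lines. get_task_info is an
# assumed callable returning an object with .state, .progress, .error and
# .result attributes; not oslo.vmware's actual implementation.
import time

def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state == "success":
            return info.result
        if info.state == "error":
            raise RuntimeError(f"task failed: {info.error}")
        # 'queued' or 'running': report progress and poll again.
        print(f"progress is {info.progress or 0}%")
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete before the deadline")
```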
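The instance_info_cache update for instance 2f7b02e8-f658-448f-b6e6-9bfa94c74da4 above carries the full VIF description: port d3047f95-b766-4344-bc0c-ad2f1b9f55fd with fixed IP 192.168.128.11 on an OVS-backed network. A small sketch of walking such a network_info list to collect the fixed addresses is below; the helper is illustrative rather than Nova's NetworkInfo model, but the dictionary shape follows the blob in the record.

```python
# Hedged sketch: collect (port_id, fixed_ip) pairs from a network_info
# list shaped like the cache update above. Illustrative helper, not
# nova.network.model.NetworkInfo.
def fixed_ips(network_info):
    pairs = []
    for vif in network_info:
        for subnet in vif.get("network", {}).get("subnets", []):
            for ip in subnet.get("ips", []):
                if ip.get("type") == "fixed":
                    pairs.append((vif.get("id"), ip.get("address")))
    return pairs

vif_cache = [{"id": "d3047f95-b766-4344-bc0c-ad2f1b9f55fd",
              "network": {"subnets": [{"ips": [
                  {"address": "192.168.128.11", "type": "fixed"}]}]}}]
print(fixed_ips(vif_cache))
# [('d3047f95-b766-4344-bc0c-ad2f1b9f55fd', '192.168.128.11')]
```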
[ 1803.072628] env[62510]: DEBUG oslo.service.loopingcall [None req-8189f79b-a4ba-46f9-9194-d393d9d2c4e3 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1803.072853] env[62510]: DEBUG nova.compute.manager [-] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1803.072957] env[62510]: DEBUG nova.network.neutron [-] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1803.123591] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dc9b7a14-8df2-4991-8107-0a2dd2111e82 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Acquiring lock "e7daad63-c802-4a86-bead-7e849064ed61" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1803.123869] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dc9b7a14-8df2-4991-8107-0a2dd2111e82 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Lock "e7daad63-c802-4a86-bead-7e849064ed61" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1803.124092] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dc9b7a14-8df2-4991-8107-0a2dd2111e82 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Acquiring lock "e7daad63-c802-4a86-bead-7e849064ed61-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1803.124278] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dc9b7a14-8df2-4991-8107-0a2dd2111e82 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Lock "e7daad63-c802-4a86-bead-7e849064ed61-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1803.124448] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dc9b7a14-8df2-4991-8107-0a2dd2111e82 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Lock "e7daad63-c802-4a86-bead-7e849064ed61-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1803.127405] env[62510]: INFO nova.compute.manager [None req-dc9b7a14-8df2-4991-8107-0a2dd2111e82 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Terminating instance [ 1803.163543] env[62510]: DEBUG oslo_vmware.api [None 
req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769242, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.229970] env[62510]: DEBUG oslo_vmware.api [None req-e9008972-1e68-4243-a5b9-7d86a9f52971 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769243, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.232882] env[62510]: DEBUG nova.network.neutron [-] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1803.289018] env[62510]: INFO nova.compute.manager [-] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Took 1.39 seconds to deallocate network for instance. [ 1803.323095] env[62510]: DEBUG nova.network.neutron [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Updating instance_info_cache with network_info: [{"id": "d3047f95-b766-4344-bc0c-ad2f1b9f55fd", "address": "fa:16:3e:97:f9:1a", "network": {"id": "3b8d6085-89b4-4ce1-b2d3-a23177f0eb79", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-951886226-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de0d125bba6242d3b9614402098efc1f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3047f95-b7", "ovs_interfaceid": "d3047f95-b766-4344-bc0c-ad2f1b9f55fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1803.354754] env[62510]: DEBUG nova.compute.manager [req-645b0d7c-8337-47bb-b1e7-32d2a5c33ff5 req-f0868087-c0f4-4dd1-94b7-3ba92ef7b567 service nova] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Received event network-vif-plugged-ca1200b2-6f64-4952-a587-f2fdb0fc14d1 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1803.355266] env[62510]: DEBUG oslo_concurrency.lockutils [req-645b0d7c-8337-47bb-b1e7-32d2a5c33ff5 req-f0868087-c0f4-4dd1-94b7-3ba92ef7b567 service nova] Acquiring lock "9fe592c1-e23a-46d5-8952-c181709d93e7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1803.356071] env[62510]: DEBUG oslo_concurrency.lockutils [req-645b0d7c-8337-47bb-b1e7-32d2a5c33ff5 
req-f0868087-c0f4-4dd1-94b7-3ba92ef7b567 service nova] Lock "9fe592c1-e23a-46d5-8952-c181709d93e7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1803.356331] env[62510]: DEBUG oslo_concurrency.lockutils [req-645b0d7c-8337-47bb-b1e7-32d2a5c33ff5 req-f0868087-c0f4-4dd1-94b7-3ba92ef7b567 service nova] Lock "9fe592c1-e23a-46d5-8952-c181709d93e7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1803.356885] env[62510]: DEBUG nova.compute.manager [req-645b0d7c-8337-47bb-b1e7-32d2a5c33ff5 req-f0868087-c0f4-4dd1-94b7-3ba92ef7b567 service nova] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] No waiting events found dispatching network-vif-plugged-ca1200b2-6f64-4952-a587-f2fdb0fc14d1 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1803.357223] env[62510]: WARNING nova.compute.manager [req-645b0d7c-8337-47bb-b1e7-32d2a5c33ff5 req-f0868087-c0f4-4dd1-94b7-3ba92ef7b567 service nova] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Received unexpected event network-vif-plugged-ca1200b2-6f64-4952-a587-f2fdb0fc14d1 for instance with vm_state building and task_state spawning. [ 1803.377066] env[62510]: DEBUG nova.network.neutron [None req-3ca2393a-be10-43c7-b771-f3e6630e3b6a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Updating instance_info_cache with network_info: [{"id": "5550cd5d-e9b6-4414-a8e4-e7c6875d2399", "address": "fa:16:3e:c1:31:c9", "network": {"id": "e49618de-aacc-4b42-8a2e-7e2dc945a3b1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-883053645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b13a257970e4a9a9f9cfecaaf37d9da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5550cd5d-e9", "ovs_interfaceid": "5550cd5d-e9b6-4414-a8e4-e7c6875d2399", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1803.485481] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1803.529255] env[62510]: DEBUG 
nova.scheduler.client.report [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Updated inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with generation 120 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1803.529678] env[62510]: DEBUG nova.compute.provider_tree [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Updating resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 generation from 120 to 121 during operation: update_inventory {{(pid=62510) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1803.530190] env[62510]: DEBUG nova.compute.provider_tree [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1803.545133] env[62510]: DEBUG oslo_vmware.api [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769238, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.560014] env[62510]: DEBUG nova.compute.manager [req-aa5ca7f5-e5a2-4bf8-a49c-f4e12d2279d7 req-875ab006-243f-440b-b2ca-a30d77e72bee service nova] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Received event network-vif-deleted-9174aa7f-56a1-4625-be49-9a7f645e961b {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1803.560325] env[62510]: INFO nova.compute.manager [req-aa5ca7f5-e5a2-4bf8-a49c-f4e12d2279d7 req-875ab006-243f-440b-b2ca-a30d77e72bee service nova] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Neutron deleted interface 9174aa7f-56a1-4625-be49-9a7f645e961b; detaching it from the instance and deleting it from the info cache [ 1803.560460] env[62510]: DEBUG nova.network.neutron [req-aa5ca7f5-e5a2-4bf8-a49c-f4e12d2279d7 req-875ab006-243f-440b-b2ca-a30d77e72bee service nova] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1803.632860] env[62510]: DEBUG nova.compute.manager [None req-dc9b7a14-8df2-4991-8107-0a2dd2111e82 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1803.633687] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-dc9b7a14-8df2-4991-8107-0a2dd2111e82 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1803.634115] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33ed816e-cc91-4d25-9ef2-0ddcbfd28498 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.643804] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc9b7a14-8df2-4991-8107-0a2dd2111e82 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1803.644035] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1fe0bf4a-4bf3-4f52-8cf9-caa7404e72fe {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.653097] env[62510]: DEBUG oslo_vmware.api [None req-dc9b7a14-8df2-4991-8107-0a2dd2111e82 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for the task: (returnval){ [ 1803.653097] env[62510]: value = "task-1769244" [ 1803.653097] env[62510]: _type = "Task" [ 1803.653097] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1803.670536] env[62510]: DEBUG oslo_vmware.api [None req-dc9b7a14-8df2-4991-8107-0a2dd2111e82 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1769244, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.670839] env[62510]: DEBUG oslo_vmware.api [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769242, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.728815] env[62510]: DEBUG oslo_vmware.api [None req-e9008972-1e68-4243-a5b9-7d86a9f52971 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769243, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.735480] env[62510]: INFO nova.compute.manager [-] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Took 1.45 seconds to deallocate network for instance. [ 1803.743297] env[62510]: DEBUG nova.compute.manager [req-e6c98acd-f5da-4753-b3ca-db15eae042fa req-68ec952b-2819-460d-8b51-08ebfea0da34 service nova] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Received event network-vif-deleted-82d9ee8a-e0e1-4753-8f1f-bb68c1a6d745 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1803.743437] env[62510]: DEBUG nova.compute.manager [req-e6c98acd-f5da-4753-b3ca-db15eae042fa req-68ec952b-2819-460d-8b51-08ebfea0da34 service nova] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Received event network-vif-deleted-89aed103-9d06-4efa-9cf0-72267b06a41d {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1803.797336] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bdf94947-4a89-4af4-a764-8935c0f78cca tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1803.825882] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Releasing lock "refresh_cache-2f7b02e8-f658-448f-b6e6-9bfa94c74da4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1803.853299] env[62510]: DEBUG nova.network.neutron [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Successfully updated port: ca1200b2-6f64-4952-a587-f2fdb0fc14d1 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1803.862943] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1803.863531] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9cabbb79-59af-4961-a119-e96393034419 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.871686] env[62510]: DEBUG 
oslo_vmware.api [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Waiting for the task: (returnval){ [ 1803.871686] env[62510]: value = "task-1769245" [ 1803.871686] env[62510]: _type = "Task" [ 1803.871686] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1803.883677] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3ca2393a-be10-43c7-b771-f3e6630e3b6a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Releasing lock "refresh_cache-f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1803.885246] env[62510]: DEBUG oslo_vmware.api [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769245, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.033202] env[62510]: DEBUG oslo_vmware.api [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769238, 'name': CloneVM_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.043023] env[62510]: DEBUG nova.network.neutron [-] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1804.043023] env[62510]: DEBUG oslo_concurrency.lockutils [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 4.399s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1804.044707] env[62510]: DEBUG oslo_concurrency.lockutils [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.584s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1804.054018] env[62510]: INFO nova.compute.claims [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1804.064733] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-eadf38c2-1c81-4759-9059-90c42d9c7964 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.079761] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29e50c97-fd10-4f05-94ae-9837cf04a305 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.093064] env[62510]: INFO nova.scheduler.client.report [None 
req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Deleted allocations for instance 16b5d928-94fe-4fd5-9909-775c28d7edd2 [ 1804.127629] env[62510]: DEBUG nova.compute.manager [req-aa5ca7f5-e5a2-4bf8-a49c-f4e12d2279d7 req-875ab006-243f-440b-b2ca-a30d77e72bee service nova] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Detach interface failed, port_id=9174aa7f-56a1-4625-be49-9a7f645e961b, reason: Instance 77f485ae-9c4c-424e-8bac-6d023e428767 could not be found. {{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1804.176620] env[62510]: DEBUG oslo_vmware.api [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769242, 'name': RemoveSnapshot_Task, 'duration_secs': 1.528608} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1804.176954] env[62510]: DEBUG oslo_vmware.api [None req-dc9b7a14-8df2-4991-8107-0a2dd2111e82 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1769244, 'name': PowerOffVM_Task, 'duration_secs': 0.218472} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1804.177652] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Deleted Snapshot of the VM instance {{(pid=62510) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1804.177956] env[62510]: INFO nova.compute.manager [None req-c062c74d-e4ec-4018-bffe-c4460cd285a5 tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Took 17.78 seconds to snapshot the instance on the hypervisor. [ 1804.181140] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc9b7a14-8df2-4991-8107-0a2dd2111e82 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1804.181383] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-dc9b7a14-8df2-4991-8107-0a2dd2111e82 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1804.181899] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-57c245b2-22b6-4380-a393-e72c110705a1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.232711] env[62510]: DEBUG oslo_vmware.api [None req-e9008972-1e68-4243-a5b9-7d86a9f52971 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769243, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.242415] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d012cbfe-4aeb-4b3a-9ae0-e5c80ec2d32c tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1804.290072] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-dc9b7a14-8df2-4991-8107-0a2dd2111e82 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1804.290365] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-dc9b7a14-8df2-4991-8107-0a2dd2111e82 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1804.290521] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc9b7a14-8df2-4991-8107-0a2dd2111e82 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Deleting the datastore file [datastore1] e7daad63-c802-4a86-bead-7e849064ed61 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1804.290836] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f6a047f7-05a1-4a79-80b6-3d4be17d438a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.299207] env[62510]: DEBUG oslo_vmware.api [None req-dc9b7a14-8df2-4991-8107-0a2dd2111e82 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for the task: (returnval){ [ 1804.299207] env[62510]: value = "task-1769247" [ 1804.299207] env[62510]: _type = "Task" [ 1804.299207] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1804.307800] env[62510]: DEBUG oslo_vmware.api [None req-dc9b7a14-8df2-4991-8107-0a2dd2111e82 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1769247, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.357655] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquiring lock "refresh_cache-9fe592c1-e23a-46d5-8952-c181709d93e7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1804.357794] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquired lock "refresh_cache-9fe592c1-e23a-46d5-8952-c181709d93e7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1804.357959] env[62510]: DEBUG nova.network.neutron [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1804.385897] env[62510]: DEBUG oslo_vmware.api [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769245, 'name': PowerOffVM_Task, 'duration_secs': 0.25188} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1804.385897] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1804.385897] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1804.385897] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9c817db-c175-407a-952e-b8d310325660 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.396661] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1804.397156] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fabbe73b-7b1e-456a-9f9b-58e18c015945 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.424526] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1804.424805] env[62510]: DEBUG 
nova.virt.vmwareapi.vmops [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1804.425883] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Deleting the datastore file [datastore1] 1e3e2044-a072-454f-85ba-5cb0bc36b5fd {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1804.425883] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-53f75563-dacf-462d-a842-377b8fa86cb4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.434917] env[62510]: DEBUG oslo_vmware.api [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Waiting for the task: (returnval){ [ 1804.434917] env[62510]: value = "task-1769249" [ 1804.434917] env[62510]: _type = "Task" [ 1804.434917] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1804.444817] env[62510]: DEBUG oslo_vmware.api [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769249, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.542520] env[62510]: DEBUG oslo_vmware.api [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769238, 'name': CloneVM_Task, 'duration_secs': 2.116308} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1804.542855] env[62510]: INFO nova.virt.vmwareapi.vmops [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Created linked-clone VM from snapshot [ 1804.543832] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec5bf269-390d-42d2-8047-ab6b58934621 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.547357] env[62510]: INFO nova.compute.manager [-] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Took 1.47 seconds to deallocate network for instance. 
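Aside (not part of the captured log): the recurring Acquiring lock / acquired (waited Ns) / "released" (held Ns) triples around "compute_resources" and the per-instance UUID locks come from oslo.concurrency's lock helpers wrapping compute-manager and resource-tracker methods. A minimal, hedged equivalent of that pattern is shown below; the function body is a stand-in, not Nova code.

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim_resources():
    # Runs while holding the in-process "compute_resources" lock; the
    # lockutils wrapper logs the DEBUG "Acquiring lock ... / acquired ...
    # waited / released ... held" messages seen throughout this log.
    pass

claim_resources()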
[ 1804.562460] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Uploading image 2bd1ac76-8500-47cd-b52c-8213b9025fc0 {{(pid=62510) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1804.596017] env[62510]: DEBUG oslo_vmware.rw_handles [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1804.596017] env[62510]: value = "vm-367433" [ 1804.596017] env[62510]: _type = "VirtualMachine" [ 1804.596017] env[62510]: }. {{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1804.596307] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-ed20bd70-ff62-4ce6-81e8-3e876b9b1819 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.605408] env[62510]: DEBUG oslo_vmware.rw_handles [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lease: (returnval){ [ 1804.605408] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5223754b-0d96-f245-85bf-f7538641fd51" [ 1804.605408] env[62510]: _type = "HttpNfcLease" [ 1804.605408] env[62510]: } obtained for exporting VM: (result){ [ 1804.605408] env[62510]: value = "vm-367433" [ 1804.605408] env[62510]: _type = "VirtualMachine" [ 1804.605408] env[62510]: }. {{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1804.605408] env[62510]: DEBUG oslo_vmware.api [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the lease: (returnval){ [ 1804.605408] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5223754b-0d96-f245-85bf-f7538641fd51" [ 1804.605408] env[62510]: _type = "HttpNfcLease" [ 1804.605408] env[62510]: } to be ready. {{(pid=62510) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1804.605751] env[62510]: DEBUG oslo_concurrency.lockutils [None req-da255b02-2770-42c3-bfec-0b6bae2ee1b9 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "16b5d928-94fe-4fd5-9909-775c28d7edd2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.410s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1804.614034] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1804.614034] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5223754b-0d96-f245-85bf-f7538641fd51" [ 1804.614034] env[62510]: _type = "HttpNfcLease" [ 1804.614034] env[62510]: } is initializing. {{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1804.730143] env[62510]: DEBUG oslo_vmware.api [None req-e9008972-1e68-4243-a5b9-7d86a9f52971 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769243, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.811868] env[62510]: DEBUG oslo_vmware.api [None req-dc9b7a14-8df2-4991-8107-0a2dd2111e82 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Task: {'id': task-1769247, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14449} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1804.812493] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc9b7a14-8df2-4991-8107-0a2dd2111e82 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1804.812695] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-dc9b7a14-8df2-4991-8107-0a2dd2111e82 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1804.812871] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-dc9b7a14-8df2-4991-8107-0a2dd2111e82 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1804.813060] env[62510]: INFO nova.compute.manager [None req-dc9b7a14-8df2-4991-8107-0a2dd2111e82 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1804.813315] env[62510]: DEBUG oslo.service.loopingcall [None req-dc9b7a14-8df2-4991-8107-0a2dd2111e82 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1804.813524] env[62510]: DEBUG nova.compute.manager [-] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1804.813693] env[62510]: DEBUG nova.network.neutron [-] [instance: e7daad63-c802-4a86-bead-7e849064ed61] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1804.895603] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ca2393a-be10-43c7-b771-f3e6630e3b6a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1804.895603] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e4a0c2ff-81a8-42c5-b30b-37e40238f2e9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.901825] env[62510]: DEBUG nova.network.neutron [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1804.904892] env[62510]: DEBUG oslo_vmware.api [None req-3ca2393a-be10-43c7-b771-f3e6630e3b6a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 1804.904892] env[62510]: value = "task-1769251" [ 1804.904892] env[62510]: _type = "Task" [ 1804.904892] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1804.915297] env[62510]: DEBUG oslo_vmware.api [None req-3ca2393a-be10-43c7-b771-f3e6630e3b6a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769251, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.947736] env[62510]: DEBUG oslo_vmware.api [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769249, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.110397} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1804.948121] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1804.948423] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1804.948655] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1805.065934] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8189f79b-a4ba-46f9-9194-d393d9d2c4e3 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1805.071877] env[62510]: DEBUG nova.network.neutron [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Updating instance_info_cache with network_info: [{"id": "ca1200b2-6f64-4952-a587-f2fdb0fc14d1", "address": "fa:16:3e:21:ea:1a", "network": {"id": "925f8c0b-2409-4eca-9a68-c5b357835972", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2008838096-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d98518565b744451ba90ba301267213f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4e52d8a-b086-4333-a5a1-938680a2d2bd", "external-id": "nsx-vlan-transportzone-973", "segmentation_id": 973, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca1200b2-6f", "ovs_interfaceid": "ca1200b2-6f64-4952-a587-f2fdb0fc14d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1805.121178] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1805.121178] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5223754b-0d96-f245-85bf-f7538641fd51" [ 1805.121178] env[62510]: _type = "HttpNfcLease" [ 1805.121178] env[62510]: } is ready. 
{{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1805.121522] env[62510]: DEBUG oslo_vmware.rw_handles [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1805.121522] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5223754b-0d96-f245-85bf-f7538641fd51" [ 1805.121522] env[62510]: _type = "HttpNfcLease" [ 1805.121522] env[62510]: }. {{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1805.123204] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-867fa287-6380-4685-88ea-05d80c9a1a3f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.132337] env[62510]: DEBUG oslo_vmware.rw_handles [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f4ee2a-794c-9db9-2879-37c3a97e31e2/disk-0.vmdk from lease info. {{(pid=62510) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1805.132524] env[62510]: DEBUG oslo_vmware.rw_handles [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f4ee2a-794c-9db9-2879-37c3a97e31e2/disk-0.vmdk for reading. {{(pid=62510) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1805.230322] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a69de353-04e5-4786-8cbe-867cf8d19002 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.236680] env[62510]: DEBUG oslo_vmware.api [None req-e9008972-1e68-4243-a5b9-7d86a9f52971 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769243, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.296024] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24db7559-1068-4303-9adc-d721ae9baeeb tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Acquiring lock "91a76cc7-7f82-42cf-a379-fc0ba3d04568" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1805.297171] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24db7559-1068-4303-9adc-d721ae9baeeb tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Lock "91a76cc7-7f82-42cf-a379-fc0ba3d04568" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1805.297171] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24db7559-1068-4303-9adc-d721ae9baeeb tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Acquiring lock "91a76cc7-7f82-42cf-a379-fc0ba3d04568-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1805.297171] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24db7559-1068-4303-9adc-d721ae9baeeb tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Lock "91a76cc7-7f82-42cf-a379-fc0ba3d04568-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1805.297171] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24db7559-1068-4303-9adc-d721ae9baeeb tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Lock "91a76cc7-7f82-42cf-a379-fc0ba3d04568-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1805.299211] env[62510]: INFO nova.compute.manager [None req-24db7559-1068-4303-9adc-d721ae9baeeb tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Terminating instance [ 1805.379129] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1805.381351] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b4ea0f2c-85a7-4607-b881-c0e59607a850 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.390530] env[62510]: DEBUG oslo_vmware.api [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting 
for the task: (returnval){ [ 1805.390530] env[62510]: value = "task-1769252" [ 1805.390530] env[62510]: _type = "Task" [ 1805.390530] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1805.400523] env[62510]: DEBUG oslo_vmware.api [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769252, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.418228] env[62510]: DEBUG oslo_vmware.api [None req-3ca2393a-be10-43c7-b771-f3e6630e3b6a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769251, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.493636] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8783271-8a02-4573-8a81-03845a2127ce {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.502026] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dcfac63-75b4-4ed4-a94a-b5d48772f713 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.541476] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7f4f5db-73a4-4a82-ae56-9acf7a1ba86d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.552297] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc52f469-f0e7-494e-a38d-62d273ba7504 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.574293] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Releasing lock "refresh_cache-9fe592c1-e23a-46d5-8952-c181709d93e7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1805.574818] env[62510]: DEBUG nova.compute.manager [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Instance network_info: |[{"id": "ca1200b2-6f64-4952-a587-f2fdb0fc14d1", "address": "fa:16:3e:21:ea:1a", "network": {"id": "925f8c0b-2409-4eca-9a68-c5b357835972", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2008838096-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d98518565b744451ba90ba301267213f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4e52d8a-b086-4333-a5a1-938680a2d2bd", "external-id": 
"nsx-vlan-transportzone-973", "segmentation_id": 973, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca1200b2-6f", "ovs_interfaceid": "ca1200b2-6f64-4952-a587-f2fdb0fc14d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1805.575445] env[62510]: DEBUG nova.compute.provider_tree [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1805.578260] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:21:ea:1a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e4e52d8a-b086-4333-a5a1-938680a2d2bd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ca1200b2-6f64-4952-a587-f2fdb0fc14d1', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1805.586886] env[62510]: DEBUG oslo.service.loopingcall [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1805.588375] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1805.588781] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-84c35c00-04fc-4f22-a710-7964cf9ed289 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.612175] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1805.612175] env[62510]: value = "task-1769253" [ 1805.612175] env[62510]: _type = "Task" [ 1805.612175] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1805.623419] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769253, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.714045] env[62510]: DEBUG nova.network.neutron [-] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1805.731650] env[62510]: DEBUG oslo_vmware.api [None req-e9008972-1e68-4243-a5b9-7d86a9f52971 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769243, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.773867] env[62510]: DEBUG nova.compute.manager [req-210dce14-b30b-40ae-beb2-e9307a297b05 req-3c66e8eb-8893-4e16-ac20-d0e84266a094 service nova] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Received event network-changed-ca1200b2-6f64-4952-a587-f2fdb0fc14d1 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1805.774200] env[62510]: DEBUG nova.compute.manager [req-210dce14-b30b-40ae-beb2-e9307a297b05 req-3c66e8eb-8893-4e16-ac20-d0e84266a094 service nova] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Refreshing instance network info cache due to event network-changed-ca1200b2-6f64-4952-a587-f2fdb0fc14d1. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1805.774428] env[62510]: DEBUG oslo_concurrency.lockutils [req-210dce14-b30b-40ae-beb2-e9307a297b05 req-3c66e8eb-8893-4e16-ac20-d0e84266a094 service nova] Acquiring lock "refresh_cache-9fe592c1-e23a-46d5-8952-c181709d93e7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1805.774597] env[62510]: DEBUG oslo_concurrency.lockutils [req-210dce14-b30b-40ae-beb2-e9307a297b05 req-3c66e8eb-8893-4e16-ac20-d0e84266a094 service nova] Acquired lock "refresh_cache-9fe592c1-e23a-46d5-8952-c181709d93e7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1805.774829] env[62510]: DEBUG nova.network.neutron [req-210dce14-b30b-40ae-beb2-e9307a297b05 req-3c66e8eb-8893-4e16-ac20-d0e84266a094 service nova] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Refreshing network info cache for port ca1200b2-6f64-4952-a587-f2fdb0fc14d1 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1805.803333] env[62510]: DEBUG nova.compute.manager [None req-24db7559-1068-4303-9adc-d721ae9baeeb tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1805.803614] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-24db7559-1068-4303-9adc-d721ae9baeeb tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1805.804836] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49f6ca1a-a8e0-48fe-8cd0-6afa1c52ce73 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.813261] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-24db7559-1068-4303-9adc-d721ae9baeeb tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1805.813658] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fbae8c61-36ab-4b87-9cc7-c14cb47392a8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.820785] env[62510]: DEBUG oslo_vmware.api [None req-24db7559-1068-4303-9adc-d721ae9baeeb tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Waiting for the task: (returnval){ [ 1805.820785] env[62510]: value = "task-1769254" [ 1805.820785] env[62510]: _type = "Task" [ 1805.820785] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1805.830935] env[62510]: DEBUG oslo_vmware.api [None req-24db7559-1068-4303-9adc-d721ae9baeeb tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769254, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.904464] env[62510]: DEBUG oslo_vmware.api [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769252, 'name': PowerOffVM_Task, 'duration_secs': 0.233148} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1805.904659] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1805.905966] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b9ff9a2-ab45-4f40-bdd6-1a892ae94108 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.922873] env[62510]: DEBUG oslo_vmware.api [None req-3ca2393a-be10-43c7-b771-f3e6630e3b6a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769251, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.939135] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1eba93f-3644-4a4c-a6fe-9338eb97ce11 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.984438] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1805.986529] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-08588737-417c-4cf5-8129-79e6596b6be8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.996589] env[62510]: DEBUG oslo_vmware.api [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1805.996589] env[62510]: value = "task-1769255" [ 1805.996589] env[62510]: _type = "Task" [ 1805.996589] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1805.999370] env[62510]: DEBUG nova.virt.hardware [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1805.999775] env[62510]: DEBUG nova.virt.hardware [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1805.999942] env[62510]: DEBUG nova.virt.hardware [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1806.000775] env[62510]: DEBUG nova.virt.hardware [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1806.000959] env[62510]: DEBUG nova.virt.hardware [None 
req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1806.001144] env[62510]: DEBUG nova.virt.hardware [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1806.001380] env[62510]: DEBUG nova.virt.hardware [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1806.001644] env[62510]: DEBUG nova.virt.hardware [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1806.001839] env[62510]: DEBUG nova.virt.hardware [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1806.002111] env[62510]: DEBUG nova.virt.hardware [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1806.002361] env[62510]: DEBUG nova.virt.hardware [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1806.003335] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b55f3e9-f569-41bd-b793-64888013ffee {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.021522] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] VM already powered off {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1806.021919] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1806.022230] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 
tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1806.022652] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1806.022945] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1806.023339] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-27f8d88b-8875-4848-bf5e-2a49e2f9fc8e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.027173] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d83d5cf-c32d-4ae9-8009-9c6b99a40683 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.045319] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Instance VIF info [] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1806.052343] env[62510]: DEBUG oslo.service.loopingcall [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1806.055207] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1806.055207] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1806.055207] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1806.055702] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aae3e29e-849e-4079-bd66-26640ed92950 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.068519] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f07650bc-62fd-4788-b69b-a376ed4d4464 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.075602] env[62510]: DEBUG oslo_vmware.api [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1806.075602] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]529d000b-1ff8-6505-469a-82d691439f6b" [ 1806.075602] env[62510]: _type = "Task" [ 1806.075602] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1806.077110] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1806.077110] env[62510]: value = "task-1769256" [ 1806.077110] env[62510]: _type = "Task" [ 1806.077110] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1806.092818] env[62510]: DEBUG nova.scheduler.client.report [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1806.097354] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769256, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.097900] env[62510]: DEBUG oslo_vmware.api [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]529d000b-1ff8-6505-469a-82d691439f6b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.127824] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769253, 'name': CreateVM_Task, 'duration_secs': 0.451809} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1806.127824] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1806.127824] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1806.127824] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1806.128504] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1806.128820] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e48f5cb5-6579-43e9-abe9-dbcc077c3faf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.135197] env[62510]: DEBUG oslo_vmware.api [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1806.135197] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]522a7052-81f8-8645-a7a7-455334a4dc63" [ 1806.135197] env[62510]: _type = "Task" [ 1806.135197] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1806.146416] env[62510]: DEBUG oslo_vmware.api [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]522a7052-81f8-8645-a7a7-455334a4dc63, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.217260] env[62510]: INFO nova.compute.manager [-] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Took 1.40 seconds to deallocate network for instance. [ 1806.234269] env[62510]: DEBUG oslo_vmware.api [None req-e9008972-1e68-4243-a5b9-7d86a9f52971 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769243, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.335670] env[62510]: DEBUG oslo_vmware.api [None req-24db7559-1068-4303-9adc-d721ae9baeeb tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769254, 'name': PowerOffVM_Task, 'duration_secs': 0.244642} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1806.336041] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-24db7559-1068-4303-9adc-d721ae9baeeb tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1806.336326] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-24db7559-1068-4303-9adc-d721ae9baeeb tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1806.336956] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f64c0635-fa1b-4b1c-b599-5e787645914d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.421361] env[62510]: DEBUG oslo_vmware.api [None req-3ca2393a-be10-43c7-b771-f3e6630e3b6a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769251, 'name': PowerOnVM_Task, 'duration_secs': 1.439498} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1806.421932] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ca2393a-be10-43c7-b771-f3e6630e3b6a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1806.426019] env[62510]: DEBUG nova.compute.manager [None req-3ca2393a-be10-43c7-b771-f3e6630e3b6a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1806.426019] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bac506c-9756-4cac-bfc0-4ea0f466ab57 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.507942] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-24db7559-1068-4303-9adc-d721ae9baeeb tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1806.508297] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-24db7559-1068-4303-9adc-d721ae9baeeb tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1806.508710] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-24db7559-1068-4303-9adc-d721ae9baeeb tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Deleting the datastore file [datastore1] 91a76cc7-7f82-42cf-a379-fc0ba3d04568 {{(pid=62510) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1806.508851] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9982db23-f3fe-4382-a862-877aea40541b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.519748] env[62510]: DEBUG oslo_vmware.api [None req-24db7559-1068-4303-9adc-d721ae9baeeb tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Waiting for the task: (returnval){ [ 1806.519748] env[62510]: value = "task-1769258" [ 1806.519748] env[62510]: _type = "Task" [ 1806.519748] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1806.533374] env[62510]: DEBUG oslo_vmware.api [None req-24db7559-1068-4303-9adc-d721ae9baeeb tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769258, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.589993] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769256, 'name': CreateVM_Task, 'duration_secs': 0.331906} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1806.595159] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1806.595924] env[62510]: DEBUG oslo_vmware.api [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]529d000b-1ff8-6505-469a-82d691439f6b, 'name': SearchDatastore_Task, 'duration_secs': 0.014549} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1806.596528] env[62510]: DEBUG oslo_concurrency.lockutils [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1806.597933] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26aca235-614d-4c31-8fdb-a2e06a269331 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.603027] env[62510]: DEBUG oslo_concurrency.lockutils [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.556s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1806.603027] env[62510]: DEBUG nova.compute.manager [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1806.605566] env[62510]: DEBUG oslo_concurrency.lockutils [None req-92b0b44e-1ae3-413c-a75c-2f7ec3c1792f tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.257s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1806.605883] env[62510]: DEBUG nova.objects.instance [None req-92b0b44e-1ae3-413c-a75c-2f7ec3c1792f tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lazy-loading 'resources' on Instance uuid b7ffe11f-2f63-419b-9ad8-0a89a05d201c {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1806.616337] env[62510]: DEBUG oslo_vmware.api [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1806.616337] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52028149-a37d-a1e2-8c42-98da9c62da9c" [ 1806.616337] env[62510]: _type = "Task" [ 1806.616337] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1806.627151] env[62510]: DEBUG oslo_vmware.api [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52028149-a37d-a1e2-8c42-98da9c62da9c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.646046] env[62510]: DEBUG oslo_vmware.api [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]522a7052-81f8-8645-a7a7-455334a4dc63, 'name': SearchDatastore_Task, 'duration_secs': 0.027154} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1806.646526] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1806.646879] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1806.647196] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1806.647514] env[62510]: DEBUG oslo_concurrency.lockutils [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1806.648190] env[62510]: DEBUG oslo_concurrency.lockutils [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1806.649933] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6ee617c-0970-4093-9f9b-810b35061720 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.657227] env[62510]: DEBUG oslo_vmware.api [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Waiting for the task: (returnval){ [ 1806.657227] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5276b579-f04e-ce4a-583f-2b4f7444a959" [ 1806.657227] env[62510]: _type = "Task" [ 1806.657227] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1806.666858] env[62510]: DEBUG oslo_vmware.api [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5276b579-f04e-ce4a-583f-2b4f7444a959, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.731584] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dc9b7a14-8df2-4991-8107-0a2dd2111e82 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1806.739890] env[62510]: DEBUG oslo_vmware.api [None req-e9008972-1e68-4243-a5b9-7d86a9f52971 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769243, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.898032] env[62510]: DEBUG nova.network.neutron [req-210dce14-b30b-40ae-beb2-e9307a297b05 req-3c66e8eb-8893-4e16-ac20-d0e84266a094 service nova] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Updated VIF entry in instance network info cache for port ca1200b2-6f64-4952-a587-f2fdb0fc14d1. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1806.898472] env[62510]: DEBUG nova.network.neutron [req-210dce14-b30b-40ae-beb2-e9307a297b05 req-3c66e8eb-8893-4e16-ac20-d0e84266a094 service nova] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Updating instance_info_cache with network_info: [{"id": "ca1200b2-6f64-4952-a587-f2fdb0fc14d1", "address": "fa:16:3e:21:ea:1a", "network": {"id": "925f8c0b-2409-4eca-9a68-c5b357835972", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2008838096-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d98518565b744451ba90ba301267213f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e4e52d8a-b086-4333-a5a1-938680a2d2bd", "external-id": "nsx-vlan-transportzone-973", "segmentation_id": 973, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca1200b2-6f", "ovs_interfaceid": "ca1200b2-6f64-4952-a587-f2fdb0fc14d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1807.035871] env[62510]: DEBUG oslo_vmware.api [None req-24db7559-1068-4303-9adc-d721ae9baeeb tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Task: {'id': task-1769258, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.330665} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1807.036224] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-24db7559-1068-4303-9adc-d721ae9baeeb tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1807.036504] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-24db7559-1068-4303-9adc-d721ae9baeeb tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1807.037122] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-24db7559-1068-4303-9adc-d721ae9baeeb tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1807.037122] env[62510]: INFO nova.compute.manager [None req-24db7559-1068-4303-9adc-d721ae9baeeb tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Took 1.23 seconds to destroy the instance on the hypervisor. [ 1807.038540] env[62510]: DEBUG oslo.service.loopingcall [None req-24db7559-1068-4303-9adc-d721ae9baeeb tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1807.038540] env[62510]: DEBUG nova.compute.manager [-] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1807.038540] env[62510]: DEBUG nova.network.neutron [-] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1807.116853] env[62510]: DEBUG nova.compute.utils [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1807.123204] env[62510]: DEBUG nova.compute.manager [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1807.123415] env[62510]: DEBUG nova.network.neutron [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1807.141069] env[62510]: DEBUG oslo_vmware.api [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52028149-a37d-a1e2-8c42-98da9c62da9c, 'name': SearchDatastore_Task, 'duration_secs': 0.015896} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1807.141456] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1807.141658] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 2f7b02e8-f658-448f-b6e6-9bfa94c74da4/645af513-c243-4722-b631-714f21477ae6-rescue.vmdk. 
{{(pid=62510) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1807.141952] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1807.142162] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1807.142380] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3f2dbefa-4122-461f-84bc-3116668a2f54 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.152051] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-45e5d46e-7e4f-474c-b2f8-89b7b47543a4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.166327] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1807.166698] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1807.167794] env[62510]: DEBUG oslo_vmware.api [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1807.167794] env[62510]: value = "task-1769259" [ 1807.167794] env[62510]: _type = "Task" [ 1807.167794] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.168890] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bddb7f18-425b-4132-be3e-e075fba13e2f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.181751] env[62510]: DEBUG oslo_vmware.api [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5276b579-f04e-ce4a-583f-2b4f7444a959, 'name': SearchDatastore_Task, 'duration_secs': 0.018763} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1807.183244] env[62510]: DEBUG oslo_concurrency.lockutils [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1807.183373] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1807.183646] env[62510]: DEBUG oslo_concurrency.lockutils [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1807.186885] env[62510]: DEBUG oslo_vmware.api [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1807.186885] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]525b9a24-a5ed-02de-cb7d-218d51164028" [ 1807.186885] env[62510]: _type = "Task" [ 1807.186885] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.193837] env[62510]: DEBUG oslo_vmware.api [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769259, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.204838] env[62510]: DEBUG oslo_vmware.api [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]525b9a24-a5ed-02de-cb7d-218d51164028, 'name': SearchDatastore_Task, 'duration_secs': 0.011951} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1807.205743] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6aeb784-318e-4488-94a0-c0566adc18f8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.213461] env[62510]: DEBUG nova.policy [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dbc6eced57ea45fdafc3635a58fb3611', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f878b652f01c48139bfc6996e5e32f5b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1807.219978] env[62510]: DEBUG oslo_vmware.api [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1807.219978] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52d21769-b1c6-1b58-9d6f-67e26cbdda1e" [ 1807.219978] env[62510]: _type = "Task" [ 1807.219978] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.235441] env[62510]: DEBUG oslo_vmware.api [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52d21769-b1c6-1b58-9d6f-67e26cbdda1e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.244905] env[62510]: DEBUG oslo_vmware.api [None req-e9008972-1e68-4243-a5b9-7d86a9f52971 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769243, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.401238] env[62510]: DEBUG oslo_concurrency.lockutils [req-210dce14-b30b-40ae-beb2-e9307a297b05 req-3c66e8eb-8893-4e16-ac20-d0e84266a094 service nova] Releasing lock "refresh_cache-9fe592c1-e23a-46d5-8952-c181709d93e7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1807.401587] env[62510]: DEBUG nova.compute.manager [req-210dce14-b30b-40ae-beb2-e9307a297b05 req-3c66e8eb-8893-4e16-ac20-d0e84266a094 service nova] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Received event network-vif-deleted-13fb40b1-132b-407d-b6e0-eec141ae88a8 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1807.591404] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dda961de-08ad-47a8-9842-a2c191b2961e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.604181] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-855b00c4-3a5c-45ff-9e8e-aba6fdb6a62e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.642368] env[62510]: DEBUG nova.compute.manager [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1807.644116] env[62510]: DEBUG nova.network.neutron [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Successfully created port: b8e2da8e-86d0-4706-bdbb-39da14ef3e15 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1807.647294] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f47675a-64eb-4689-8e35-d5d02f7bcd13 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.657573] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2122c377-f37d-40be-b3da-7f1cf8797b94 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.680456] env[62510]: DEBUG nova.compute.provider_tree [None req-92b0b44e-1ae3-413c-a75c-2f7ec3c1792f tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1807.693476] env[62510]: DEBUG oslo_vmware.api [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769259, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.739476] env[62510]: DEBUG oslo_vmware.api [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52d21769-b1c6-1b58-9d6f-67e26cbdda1e, 'name': SearchDatastore_Task, 'duration_secs': 0.01072} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1807.742970] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1807.743341] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 9fe592c1-e23a-46d5-8952-c181709d93e7/9fe592c1-e23a-46d5-8952-c181709d93e7.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1807.743701] env[62510]: DEBUG oslo_vmware.api [None req-e9008972-1e68-4243-a5b9-7d86a9f52971 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769243, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.743878] env[62510]: DEBUG oslo_concurrency.lockutils [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1807.744183] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1807.744436] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8680d507-e7f1-451e-9872-eafc66705722 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.746586] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d5943d8b-e3c3-45e2-b187-28e36eaf88db {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.757110] env[62510]: DEBUG oslo_vmware.api [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1807.757110] env[62510]: value = "task-1769260" [ 1807.757110] env[62510]: _type = "Task" [ 1807.757110] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.765285] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1807.765285] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1807.765285] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5b6345e-455a-4e63-91f4-6619c30f21c2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.773603] env[62510]: DEBUG oslo_vmware.api [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769260, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.777921] env[62510]: DEBUG oslo_vmware.api [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Waiting for the task: (returnval){ [ 1807.777921] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52a1b6f7-f12c-d10f-7d7b-680a675c727a" [ 1807.777921] env[62510]: _type = "Task" [ 1807.777921] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.787566] env[62510]: DEBUG oslo_vmware.api [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52a1b6f7-f12c-d10f-7d7b-680a675c727a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.806954] env[62510]: DEBUG nova.compute.manager [req-d25160e7-510c-4207-8cb3-1e8dd05fd1ad req-a1942ca3-5545-424c-bd0d-4be0ea5b38b6 service nova] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Received event network-vif-deleted-9331bfef-61be-4509-82fc-39111423b497 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1807.807037] env[62510]: INFO nova.compute.manager [req-d25160e7-510c-4207-8cb3-1e8dd05fd1ad req-a1942ca3-5545-424c-bd0d-4be0ea5b38b6 service nova] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Neutron deleted interface 9331bfef-61be-4509-82fc-39111423b497; detaching it from the instance and deleting it from the info cache [ 1807.807242] env[62510]: DEBUG nova.network.neutron [req-d25160e7-510c-4207-8cb3-1e8dd05fd1ad req-a1942ca3-5545-424c-bd0d-4be0ea5b38b6 service nova] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1807.957740] env[62510]: DEBUG nova.network.neutron [-] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1808.190529] env[62510]: DEBUG nova.scheduler.client.report [None req-92b0b44e-1ae3-413c-a75c-2f7ec3c1792f tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1808.202479] env[62510]: DEBUG oslo_vmware.api [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769259, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.547256} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1808.204130] env[62510]: INFO nova.virt.vmwareapi.ds_util [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 2f7b02e8-f658-448f-b6e6-9bfa94c74da4/645af513-c243-4722-b631-714f21477ae6-rescue.vmdk. 
[ 1808.208167] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f0ffc2a-b7b3-4c90-aedf-df4efbf1d15b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.239890] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] 2f7b02e8-f658-448f-b6e6-9bfa94c74da4/645af513-c243-4722-b631-714f21477ae6-rescue.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1808.249022] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4cf60909-4a3d-4316-8e37-e8ce0f014cb2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.272188] env[62510]: DEBUG oslo_vmware.api [None req-e9008972-1e68-4243-a5b9-7d86a9f52971 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769243, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.274331] env[62510]: DEBUG oslo_vmware.api [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1808.274331] env[62510]: value = "task-1769261" [ 1808.274331] env[62510]: _type = "Task" [ 1808.274331] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.281627] env[62510]: DEBUG oslo_vmware.api [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769260, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.289739] env[62510]: DEBUG oslo_vmware.api [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769261, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.298141] env[62510]: DEBUG oslo_vmware.api [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52a1b6f7-f12c-d10f-7d7b-680a675c727a, 'name': SearchDatastore_Task, 'duration_secs': 0.016623} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1808.302356] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-319ca25f-6aa7-4fd5-8564-6fd04363e8e7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.306894] env[62510]: DEBUG oslo_vmware.api [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Waiting for the task: (returnval){ [ 1808.306894] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52f3ea1f-6a79-33f5-f899-90563661bf45" [ 1808.306894] env[62510]: _type = "Task" [ 1808.306894] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.311663] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d83e06e3-119b-4dc0-abde-e800ee59b294 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.320490] env[62510]: DEBUG oslo_vmware.api [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52f3ea1f-6a79-33f5-f899-90563661bf45, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.324902] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f86e0e09-24e3-46c0-8aa9-3574d74e6deb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.361471] env[62510]: DEBUG nova.compute.manager [req-d25160e7-510c-4207-8cb3-1e8dd05fd1ad req-a1942ca3-5545-424c-bd0d-4be0ea5b38b6 service nova] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Detach interface failed, port_id=9331bfef-61be-4509-82fc-39111423b497, reason: Instance 91a76cc7-7f82-42cf-a379-fc0ba3d04568 could not be found. {{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1808.462785] env[62510]: INFO nova.compute.manager [-] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Took 1.42 seconds to deallocate network for instance. [ 1808.662109] env[62510]: DEBUG nova.compute.manager [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1808.691394] env[62510]: DEBUG nova.virt.hardware [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1808.691725] env[62510]: DEBUG nova.virt.hardware [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1808.691905] env[62510]: DEBUG nova.virt.hardware [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1808.692114] env[62510]: DEBUG nova.virt.hardware [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1808.692267] env[62510]: DEBUG nova.virt.hardware [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1808.692417] env[62510]: DEBUG nova.virt.hardware [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1808.692751] env[62510]: DEBUG nova.virt.hardware [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1808.692940] env[62510]: DEBUG nova.virt.hardware [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1808.693190] env[62510]: DEBUG nova.virt.hardware [None 
req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1808.693411] env[62510]: DEBUG nova.virt.hardware [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1808.693621] env[62510]: DEBUG nova.virt.hardware [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1808.694578] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07dc666f-ac6e-4f07-8714-f9ebae5d59f5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.704323] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04497be2-fb58-4d20-96e2-5b8f4c6c9f9a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.710332] env[62510]: DEBUG oslo_concurrency.lockutils [None req-92b0b44e-1ae3-413c-a75c-2f7ec3c1792f tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.104s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1808.713018] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 15.216s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1808.713294] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1808.713492] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62510) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1808.714773] env[62510]: DEBUG oslo_concurrency.lockutils [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.611s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1808.714773] env[62510]: DEBUG nova.objects.instance [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 
tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Lazy-loading 'resources' on Instance uuid dabc046f-10f5-43d8-90f8-507dcb4d0144 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1808.719051] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68c1fa25-df82-4758-a764-8eddefe86cc5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.742949] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17ee3de6-c275-4f23-b070-25d41c64fd00 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.754473] env[62510]: DEBUG oslo_vmware.api [None req-e9008972-1e68-4243-a5b9-7d86a9f52971 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769243, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.755537] env[62510]: INFO nova.scheduler.client.report [None req-92b0b44e-1ae3-413c-a75c-2f7ec3c1792f tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Deleted allocations for instance b7ffe11f-2f63-419b-9ad8-0a89a05d201c [ 1808.774393] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7252ae2-5d6d-4694-91d2-f54dae216d1f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.793918] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2111da66-b990-44ce-be2d-9d69b55335b9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.798152] env[62510]: DEBUG oslo_vmware.api [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769260, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.612712} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1808.798413] env[62510]: DEBUG oslo_vmware.api [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769261, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.798667] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 9fe592c1-e23a-46d5-8952-c181709d93e7/9fe592c1-e23a-46d5-8952-c181709d93e7.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1808.799500] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1808.799654] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1edd4780-51eb-426e-8296-787d2bc99fe5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.828351] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178723MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=62510) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1808.828497] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1808.833305] env[62510]: DEBUG oslo_vmware.api [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1808.833305] env[62510]: value = "task-1769262" [ 1808.833305] env[62510]: _type = "Task" [ 1808.833305] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.840684] env[62510]: DEBUG oslo_vmware.api [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52f3ea1f-6a79-33f5-f899-90563661bf45, 'name': SearchDatastore_Task, 'duration_secs': 0.042677} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1808.841343] env[62510]: DEBUG oslo_concurrency.lockutils [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1808.841612] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 1e3e2044-a072-454f-85ba-5cb0bc36b5fd/1e3e2044-a072-454f-85ba-5cb0bc36b5fd.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1808.841881] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5b4effe1-a24e-49d4-aa74-a81200ea32a1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.847613] env[62510]: DEBUG oslo_vmware.api [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769262, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.852797] env[62510]: DEBUG oslo_vmware.api [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Waiting for the task: (returnval){ [ 1808.852797] env[62510]: value = "task-1769263" [ 1808.852797] env[62510]: _type = "Task" [ 1808.852797] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.863849] env[62510]: DEBUG oslo_vmware.api [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769263, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.972313] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24db7559-1068-4303-9adc-d721ae9baeeb tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1809.244894] env[62510]: DEBUG oslo_vmware.api [None req-e9008972-1e68-4243-a5b9-7d86a9f52971 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769243, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.273570] env[62510]: DEBUG oslo_concurrency.lockutils [None req-92b0b44e-1ae3-413c-a75c-2f7ec3c1792f tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "b7ffe11f-2f63-419b-9ad8-0a89a05d201c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.464s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1809.291662] env[62510]: DEBUG oslo_vmware.api [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769261, 'name': ReconfigVM_Task, 'duration_secs': 0.518123} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.291946] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Reconfigured VM instance instance-00000052 to attach disk [datastore1] 2f7b02e8-f658-448f-b6e6-9bfa94c74da4/645af513-c243-4722-b631-714f21477ae6-rescue.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1809.293038] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cc3b54f-216e-458c-973b-1db8f2be19ea {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.331347] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-905740e5-6063-4533-9aab-d885a039ef7b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.356794] env[62510]: DEBUG oslo_vmware.api [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769262, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.199727} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.362723] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1809.363243] env[62510]: DEBUG oslo_vmware.api [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1809.363243] env[62510]: value = "task-1769264" [ 1809.363243] env[62510]: _type = "Task" [ 1809.363243] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.369435] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-073f2e92-44b0-4a8b-b4b0-35b7b01e0fb6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.375553] env[62510]: DEBUG nova.compute.manager [req-515ca1bb-bcdc-4630-8317-f0c459dd43e5 req-768e9c12-f706-408b-8329-857daf59a1ca service nova] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Received event network-vif-plugged-b8e2da8e-86d0-4706-bdbb-39da14ef3e15 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1809.376140] env[62510]: DEBUG oslo_concurrency.lockutils [req-515ca1bb-bcdc-4630-8317-f0c459dd43e5 req-768e9c12-f706-408b-8329-857daf59a1ca service nova] Acquiring lock "313f7916-0737-4e44-ae2f-58301934bf06-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1809.376140] env[62510]: DEBUG oslo_concurrency.lockutils [req-515ca1bb-bcdc-4630-8317-f0c459dd43e5 req-768e9c12-f706-408b-8329-857daf59a1ca service nova] Lock "313f7916-0737-4e44-ae2f-58301934bf06-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1809.376463] env[62510]: DEBUG oslo_concurrency.lockutils [req-515ca1bb-bcdc-4630-8317-f0c459dd43e5 req-768e9c12-f706-408b-8329-857daf59a1ca service nova] Lock "313f7916-0737-4e44-ae2f-58301934bf06-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1809.376698] env[62510]: DEBUG nova.compute.manager [req-515ca1bb-bcdc-4630-8317-f0c459dd43e5 req-768e9c12-f706-408b-8329-857daf59a1ca service nova] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] No waiting events found dispatching network-vif-plugged-b8e2da8e-86d0-4706-bdbb-39da14ef3e15 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1809.376836] env[62510]: WARNING nova.compute.manager [req-515ca1bb-bcdc-4630-8317-f0c459dd43e5 req-768e9c12-f706-408b-8329-857daf59a1ca service nova] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Received unexpected event network-vif-plugged-b8e2da8e-86d0-4706-bdbb-39da14ef3e15 for instance with vm_state building and task_state spawning. [ 1809.387420] env[62510]: DEBUG oslo_vmware.api [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769263, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.395112] env[62510]: DEBUG oslo_vmware.api [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769264, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.411104] env[62510]: DEBUG nova.network.neutron [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Successfully updated port: b8e2da8e-86d0-4706-bdbb-39da14ef3e15 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1809.422793] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] 9fe592c1-e23a-46d5-8952-c181709d93e7/9fe592c1-e23a-46d5-8952-c181709d93e7.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1809.424089] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-44da4a4b-4374-4e2c-bf64-bcaf20123704 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.454836] env[62510]: DEBUG oslo_vmware.api [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1809.454836] env[62510]: value = "task-1769265" [ 1809.454836] env[62510]: _type = "Task" [ 1809.454836] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.472313] env[62510]: DEBUG oslo_vmware.api [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769265, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.695061] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5376b3d-dd8d-44b6-bcfd-330c5f8b8ccf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.704733] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e4a813e-03fd-43de-aaeb-6b3b82b9c81b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.744204] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ac5fe73-2b99-46c4-8791-6ec3bb025f36 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.756123] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4227211-de88-438c-9eac-61758991e975 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.760336] env[62510]: DEBUG oslo_vmware.api [None req-e9008972-1e68-4243-a5b9-7d86a9f52971 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769243, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.776027] env[62510]: DEBUG nova.compute.provider_tree [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1809.866854] env[62510]: DEBUG oslo_vmware.api [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769263, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.61096} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.867180] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 1e3e2044-a072-454f-85ba-5cb0bc36b5fd/1e3e2044-a072-454f-85ba-5cb0bc36b5fd.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1809.867462] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1809.867868] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c4be9d3c-ee3f-491f-9ef1-2e90e87272e3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.879194] env[62510]: DEBUG oslo_vmware.api [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Waiting for the task: (returnval){ [ 1809.879194] env[62510]: value = "task-1769266" [ 1809.879194] env[62510]: _type = "Task" [ 1809.879194] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.886622] env[62510]: DEBUG oslo_vmware.api [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769264, 'name': ReconfigVM_Task, 'duration_secs': 0.252301} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.887291] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1809.887569] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d3a7b837-828d-4400-9769-4c48788af8fc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.892909] env[62510]: DEBUG oslo_vmware.api [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769266, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.895260] env[62510]: DEBUG oslo_vmware.api [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1809.895260] env[62510]: value = "task-1769267" [ 1809.895260] env[62510]: _type = "Task" [ 1809.895260] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.904671] env[62510]: DEBUG oslo_vmware.api [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769267, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.924182] env[62510]: DEBUG oslo_concurrency.lockutils [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "refresh_cache-313f7916-0737-4e44-ae2f-58301934bf06" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1809.924404] env[62510]: DEBUG oslo_concurrency.lockutils [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquired lock "refresh_cache-313f7916-0737-4e44-ae2f-58301934bf06" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1809.924600] env[62510]: DEBUG nova.network.neutron [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1809.965251] env[62510]: DEBUG oslo_vmware.api [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769265, 'name': ReconfigVM_Task, 'duration_secs': 0.345591} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.965570] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Reconfigured VM instance instance-00000059 to attach disk [datastore1] 9fe592c1-e23a-46d5-8952-c181709d93e7/9fe592c1-e23a-46d5-8952-c181709d93e7.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1809.966584] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-26920f30-ca9f-4c39-81fd-2fdb5cf4de00 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.978257] env[62510]: DEBUG oslo_vmware.api [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1809.978257] env[62510]: value = "task-1769268" [ 1809.978257] env[62510]: _type = "Task" [ 1809.978257] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.988838] env[62510]: DEBUG oslo_vmware.api [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769268, 'name': Rename_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.007705] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Acquiring lock "01204162-bf8e-46e0-bcf4-00df9ed7e7ce" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1810.007894] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Lock "01204162-bf8e-46e0-bcf4-00df9ed7e7ce" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1810.254870] env[62510]: DEBUG oslo_vmware.api [None req-e9008972-1e68-4243-a5b9-7d86a9f52971 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769243, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.301845] env[62510]: ERROR nova.scheduler.client.report [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] [req-682057fe-9f76-4325-aed5-303aa3441387] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c3653102-341b-4ed1-8b1f-1abaf8aa3e56. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-682057fe-9f76-4325-aed5-303aa3441387"}]} [ 1810.317996] env[62510]: DEBUG nova.scheduler.client.report [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Refreshing inventories for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1810.331390] env[62510]: DEBUG nova.scheduler.client.report [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Updating ProviderTree inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1810.331782] env[62510]: DEBUG nova.compute.provider_tree [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1810.344600] env[62510]: DEBUG nova.scheduler.client.report [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Refreshing aggregate associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, aggregates: None {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1810.362207] env[62510]: DEBUG nova.scheduler.client.report [None 
req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Refreshing trait associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1810.393017] env[62510]: DEBUG oslo_vmware.api [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769266, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.133915} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1810.393328] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1810.394144] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13cd46c9-c845-4b5c-96b1-f3bd16949384 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.425028] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] 1e3e2044-a072-454f-85ba-5cb0bc36b5fd/1e3e2044-a072-454f-85ba-5cb0bc36b5fd.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1810.431826] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a5f4733b-045f-4d6a-be62-aad7800f6ac0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.448516] env[62510]: DEBUG oslo_vmware.api [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769267, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.455037] env[62510]: DEBUG oslo_vmware.api [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Waiting for the task: (returnval){ [ 1810.455037] env[62510]: value = "task-1769269" [ 1810.455037] env[62510]: _type = "Task" [ 1810.455037] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1810.465726] env[62510]: DEBUG oslo_vmware.api [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769269, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.483372] env[62510]: DEBUG nova.network.neutron [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1810.494222] env[62510]: DEBUG oslo_vmware.api [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769268, 'name': Rename_Task, 'duration_secs': 0.238918} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1810.494497] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1810.494788] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8dd1da75-37b1-4c15-aae0-281efb90a7d1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.504191] env[62510]: DEBUG oslo_vmware.api [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1810.504191] env[62510]: value = "task-1769270" [ 1810.504191] env[62510]: _type = "Task" [ 1810.504191] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1810.509989] env[62510]: DEBUG nova.compute.manager [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1810.519351] env[62510]: DEBUG oslo_vmware.api [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769270, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.673511] env[62510]: DEBUG nova.network.neutron [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Updating instance_info_cache with network_info: [{"id": "b8e2da8e-86d0-4706-bdbb-39da14ef3e15", "address": "fa:16:3e:cd:c0:e6", "network": {"id": "de9186ec-ac4f-4ac0-8499-037f92e28197", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-164983974-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f878b652f01c48139bfc6996e5e32f5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "191a5351-07d5-4138-b855-206f48fc4375", "external-id": "nsx-vlan-transportzone-939", "segmentation_id": 939, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8e2da8e-86", "ovs_interfaceid": "b8e2da8e-86d0-4706-bdbb-39da14ef3e15", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1810.754340] env[62510]: DEBUG oslo_vmware.api [None req-e9008972-1e68-4243-a5b9-7d86a9f52971 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769243, 'name': ReconfigVM_Task} progress is 18%. 
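The instance_info_cache entries above embed the full Neutron VIF model as JSON. Purely as a reading aid, a small hypothetical helper (not Nova code) that pulls the port ID, MAC address and fixed IPs out of one cached VIF, using values trimmed from the cache update for instance 313f7916-0737-4e44-ae2f-58301934bf06:

    # Hypothetical reading aid, not part of Nova. Sample data is trimmed from
    # the network_info blob logged above.
    def summarize_vif(vif):
        """Return (port_id, mac, fixed_ips) for one cached VIF entry."""
        ips = [ip['address']
               for subnet in vif['network']['subnets']
               for ip in subnet['ips']]
        return vif['id'], vif['address'], ips

    sample_vif = {
        'id': 'b8e2da8e-86d0-4706-bdbb-39da14ef3e15',
        'address': 'fa:16:3e:cd:c0:e6',
        'network': {'subnets': [{'cidr': '192.168.128.0/28',
                                 'ips': [{'address': '192.168.128.14'}]}]},
    }

    print(summarize_vif(sample_vif))
    # ('b8e2da8e-86d0-4706-bdbb-39da14ef3e15', 'fa:16:3e:cd:c0:e6', ['192.168.128.14'])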
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.770198] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97a11c1f-ae2d-4de0-bd74-d0e78a6b0311 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.778847] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21aebc00-ebfc-4957-9bef-9203fb5f368d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.812419] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ca6e2b1-44b0-41e8-b40e-6b000674d765 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.822024] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77d3da52-9d86-4d87-b1ff-caae5d2e5dae {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.839101] env[62510]: DEBUG nova.compute.provider_tree [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1810.845299] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "e7e053be-cb88-4ae0-b157-3006211f77d9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1810.845698] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "e7e053be-cb88-4ae0-b157-3006211f77d9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1810.909080] env[62510]: DEBUG oslo_vmware.api [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769267, 'name': PowerOnVM_Task, 'duration_secs': 0.565051} completed successfully. 
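For orientation, the usable capacity Placement derives from an inventory record is (total - reserved) * allocation_ratio. Applying that standard formula to the inventory logged above for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 gives 192 VCPU, 196078 MB of RAM and 400 GB of disk; a quick check:

    # Quick arithmetic check using the inventory values logged above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0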
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1810.909833] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1810.913174] env[62510]: DEBUG nova.compute.manager [None req-2f6f3629-e57d-46d4-ad1b-f34a2bd4c8ca tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1810.914098] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15c2008c-f76d-41b0-b3b9-5dd57786afa6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.966694] env[62510]: DEBUG oslo_vmware.api [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769269, 'name': ReconfigVM_Task, 'duration_secs': 0.451258} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1810.967069] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Reconfigured VM instance instance-00000058 to attach disk [datastore1] 1e3e2044-a072-454f-85ba-5cb0bc36b5fd/1e3e2044-a072-454f-85ba-5cb0bc36b5fd.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1810.967774] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-51f30397-108b-4a8f-bbcd-e226699fd745 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.977366] env[62510]: DEBUG oslo_vmware.api [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Waiting for the task: (returnval){ [ 1810.977366] env[62510]: value = "task-1769271" [ 1810.977366] env[62510]: _type = "Task" [ 1810.977366] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1810.987708] env[62510]: DEBUG oslo_vmware.api [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769271, 'name': Rename_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.014925] env[62510]: DEBUG oslo_vmware.api [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769270, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.038111] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1811.175638] env[62510]: DEBUG oslo_concurrency.lockutils [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Releasing lock "refresh_cache-313f7916-0737-4e44-ae2f-58301934bf06" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1811.176052] env[62510]: DEBUG nova.compute.manager [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Instance network_info: |[{"id": "b8e2da8e-86d0-4706-bdbb-39da14ef3e15", "address": "fa:16:3e:cd:c0:e6", "network": {"id": "de9186ec-ac4f-4ac0-8499-037f92e28197", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-164983974-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f878b652f01c48139bfc6996e5e32f5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "191a5351-07d5-4138-b855-206f48fc4375", "external-id": "nsx-vlan-transportzone-939", "segmentation_id": 939, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8e2da8e-86", "ovs_interfaceid": "b8e2da8e-86d0-4706-bdbb-39da14ef3e15", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1811.176477] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cd:c0:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '191a5351-07d5-4138-b855-206f48fc4375', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b8e2da8e-86d0-4706-bdbb-39da14ef3e15', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1811.186366] env[62510]: DEBUG oslo.service.loopingcall [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1811.186759] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1811.187131] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-162c8353-a093-4538-9893-d66e113e834e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.209999] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1811.209999] env[62510]: value = "task-1769272" [ 1811.209999] env[62510]: _type = "Task" [ 1811.209999] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.221832] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769272, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.255661] env[62510]: DEBUG oslo_vmware.api [None req-e9008972-1e68-4243-a5b9-7d86a9f52971 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769243, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.348662] env[62510]: DEBUG nova.compute.manager [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1811.360747] env[62510]: ERROR nova.scheduler.client.report [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] [req-20dc34ff-05cc-4abe-a788-f331cba62364] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c3653102-341b-4ed1-8b1f-1abaf8aa3e56. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-20dc34ff-05cc-4abe-a788-f331cba62364"}]} [ 1811.379669] env[62510]: DEBUG nova.scheduler.client.report [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Refreshing inventories for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1811.395727] env[62510]: DEBUG nova.compute.manager [req-9cfe5e6b-04d8-4a67-b172-8cb092f3fc6f req-02041cc4-8dc1-436e-a263-6745cd3dd739 service nova] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Received event network-changed-b8e2da8e-86d0-4706-bdbb-39da14ef3e15 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1811.395984] env[62510]: DEBUG nova.compute.manager [req-9cfe5e6b-04d8-4a67-b172-8cb092f3fc6f req-02041cc4-8dc1-436e-a263-6745cd3dd739 service nova] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Refreshing instance network info cache due to event network-changed-b8e2da8e-86d0-4706-bdbb-39da14ef3e15. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1811.396281] env[62510]: DEBUG oslo_concurrency.lockutils [req-9cfe5e6b-04d8-4a67-b172-8cb092f3fc6f req-02041cc4-8dc1-436e-a263-6745cd3dd739 service nova] Acquiring lock "refresh_cache-313f7916-0737-4e44-ae2f-58301934bf06" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1811.396717] env[62510]: DEBUG oslo_concurrency.lockutils [req-9cfe5e6b-04d8-4a67-b172-8cb092f3fc6f req-02041cc4-8dc1-436e-a263-6745cd3dd739 service nova] Acquired lock "refresh_cache-313f7916-0737-4e44-ae2f-58301934bf06" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1811.396717] env[62510]: DEBUG nova.network.neutron [req-9cfe5e6b-04d8-4a67-b172-8cb092f3fc6f req-02041cc4-8dc1-436e-a263-6745cd3dd739 service nova] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Refreshing network info cache for port b8e2da8e-86d0-4706-bdbb-39da14ef3e15 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1811.400065] env[62510]: DEBUG nova.scheduler.client.report [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Updating ProviderTree inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1811.400343] env[62510]: DEBUG nova.compute.provider_tree [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: 
{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1811.413272] env[62510]: DEBUG nova.scheduler.client.report [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Refreshing aggregate associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, aggregates: None {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1811.433425] env[62510]: DEBUG nova.scheduler.client.report [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Refreshing trait associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1811.489729] env[62510]: DEBUG oslo_vmware.api [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769271, 'name': Rename_Task, 'duration_secs': 0.246924} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1811.490050] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1811.490330] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b2bb39b6-2ec9-4c5b-a95b-6dbdc754eb9f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.500035] env[62510]: DEBUG oslo_vmware.api [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Waiting for the task: (returnval){ [ 1811.500035] env[62510]: value = "task-1769273" [ 1811.500035] env[62510]: _type = "Task" [ 1811.500035] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.508177] env[62510]: DEBUG oslo_vmware.api [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769273, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.521609] env[62510]: DEBUG oslo_vmware.api [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769270, 'name': PowerOnVM_Task, 'duration_secs': 0.668503} completed successfully. 
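The 409 above is Placement's optimistic concurrency check: every inventory PUT carries the resource-provider generation it was computed against, a stale generation is rejected with code placement.concurrent_update, and the client refreshes and retries (further down the generation moves from 123 to 124). A rough sketch of that loop with plain requests; the endpoint, token and retry bound are placeholders rather than values from this deployment, and this is not Nova's SchedulerReportClient:

    # Rough, hypothetical sketch of a generation-guarded inventory update.
    import requests

    PLACEMENT = 'http://placement.example/placement'   # placeholder endpoint
    HEADERS = {
        'X-Auth-Token': 'TOKEN',                        # placeholder token
        'OpenStack-API-Version': 'placement 1.26',
    }

    def set_inventory(rp_uuid, inventories):
        url = f'{PLACEMENT}/resource_providers/{rp_uuid}/inventories'
        for _ in range(4):
            # Re-read the provider's current generation before each attempt.
            current = requests.get(url, headers=HEADERS).json()
            body = {
                'resource_provider_generation':
                    current['resource_provider_generation'],
                'inventories': inventories,
            }
            resp = requests.put(url, json=body, headers=HEADERS)
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()
            # 409 placement.concurrent_update: another writer bumped the
            # generation between our read and write; refresh and retry.
        raise RuntimeError('inventory update kept conflicting')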
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1811.522248] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1811.522461] env[62510]: INFO nova.compute.manager [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Took 9.84 seconds to spawn the instance on the hypervisor. [ 1811.522652] env[62510]: DEBUG nova.compute.manager [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1811.523455] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e782bfc-2b18-4cff-8f88-86c4942fc6f6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.725232] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769272, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.753276] env[62510]: DEBUG oslo_vmware.api [None req-e9008972-1e68-4243-a5b9-7d86a9f52971 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769243, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.766518] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42ca9471-75e9-4e27-a9a3-6becec8f5f53 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.774793] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ceab99a-70ab-4d5f-9d33-7ffed2b8b6ea {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.809119] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a1ed048-ce61-425e-a650-b776a999e1a7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.817463] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1d830d7-6f77-45aa-bb39-a4cf95495160 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.832587] env[62510]: DEBUG nova.compute.provider_tree [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1811.868902] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1811.945604] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d9a3169f-07d0-46bb-9449-1c81a3e49604 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Acquiring lock "c8e69231-2786-47ac-9a44-c194088b8079" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1811.945604] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d9a3169f-07d0-46bb-9449-1c81a3e49604 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Lock "c8e69231-2786-47ac-9a44-c194088b8079" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1811.945604] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d9a3169f-07d0-46bb-9449-1c81a3e49604 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Acquiring lock 
"c8e69231-2786-47ac-9a44-c194088b8079-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1811.945604] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d9a3169f-07d0-46bb-9449-1c81a3e49604 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Lock "c8e69231-2786-47ac-9a44-c194088b8079-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1811.945604] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d9a3169f-07d0-46bb-9449-1c81a3e49604 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Lock "c8e69231-2786-47ac-9a44-c194088b8079-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1811.947909] env[62510]: INFO nova.compute.manager [None req-d9a3169f-07d0-46bb-9449-1c81a3e49604 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Terminating instance [ 1812.013944] env[62510]: DEBUG oslo_vmware.api [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769273, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.055911] env[62510]: INFO nova.compute.manager [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Took 33.01 seconds to build instance. [ 1812.149715] env[62510]: DEBUG nova.network.neutron [req-9cfe5e6b-04d8-4a67-b172-8cb092f3fc6f req-02041cc4-8dc1-436e-a263-6745cd3dd739 service nova] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Updated VIF entry in instance network info cache for port b8e2da8e-86d0-4706-bdbb-39da14ef3e15. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1812.150333] env[62510]: DEBUG nova.network.neutron [req-9cfe5e6b-04d8-4a67-b172-8cb092f3fc6f req-02041cc4-8dc1-436e-a263-6745cd3dd739 service nova] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Updating instance_info_cache with network_info: [{"id": "b8e2da8e-86d0-4706-bdbb-39da14ef3e15", "address": "fa:16:3e:cd:c0:e6", "network": {"id": "de9186ec-ac4f-4ac0-8499-037f92e28197", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-164983974-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f878b652f01c48139bfc6996e5e32f5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "191a5351-07d5-4138-b855-206f48fc4375", "external-id": "nsx-vlan-transportzone-939", "segmentation_id": 939, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8e2da8e-86", "ovs_interfaceid": "b8e2da8e-86d0-4706-bdbb-39da14ef3e15", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1812.221371] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769272, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.256751] env[62510]: DEBUG oslo_vmware.api [None req-e9008972-1e68-4243-a5b9-7d86a9f52971 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769243, 'name': ReconfigVM_Task, 'duration_secs': 9.3595} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1812.257127] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e9008972-1e68-4243-a5b9-7d86a9f52971 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Releasing lock "241d842d-3dd5-4ac2-a18a-12b9c9fbd340" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1812.257382] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e9008972-1e68-4243-a5b9-7d86a9f52971 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Reconfigured VM to detach interface {{(pid=62510) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1812.366863] env[62510]: DEBUG nova.scheduler.client.report [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Updated inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with generation 123 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1812.367244] env[62510]: DEBUG nova.compute.provider_tree [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Updating resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 generation from 123 to 124 during operation: update_inventory {{(pid=62510) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1812.367385] env[62510]: DEBUG nova.compute.provider_tree [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1812.454641] env[62510]: DEBUG nova.compute.manager [None req-d9a3169f-07d0-46bb-9449-1c81a3e49604 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1812.454975] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d9a3169f-07d0-46bb-9449-1c81a3e49604 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1812.455959] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1484664a-a313-4547-8382-60b46c575e75 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.465307] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9a3169f-07d0-46bb-9449-1c81a3e49604 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1812.465696] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-65dc8404-8c09-419b-99f4-6129d323d597 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.473679] env[62510]: DEBUG oslo_vmware.api [None req-d9a3169f-07d0-46bb-9449-1c81a3e49604 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Waiting for the task: (returnval){ [ 1812.473679] env[62510]: value = "task-1769274" [ 1812.473679] env[62510]: _type = "Task" [ 1812.473679] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1812.485477] env[62510]: DEBUG oslo_vmware.api [None req-d9a3169f-07d0-46bb-9449-1c81a3e49604 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Task: {'id': task-1769274, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.503761] env[62510]: INFO nova.compute.manager [None req-bb14168c-19ff-47a0-8da9-80cdb04852ac tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Unrescuing [ 1812.503761] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bb14168c-19ff-47a0-8da9-80cdb04852ac tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquiring lock "refresh_cache-2f7b02e8-f658-448f-b6e6-9bfa94c74da4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1812.503761] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bb14168c-19ff-47a0-8da9-80cdb04852ac tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquired lock "refresh_cache-2f7b02e8-f658-448f-b6e6-9bfa94c74da4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1812.503761] env[62510]: DEBUG nova.network.neutron [None req-bb14168c-19ff-47a0-8da9-80cdb04852ac tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1812.516527] env[62510]: DEBUG oslo_vmware.api [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769273, 'name': PowerOnVM_Task, 'duration_secs': 0.797878} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1812.516746] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1812.516961] env[62510]: DEBUG nova.compute.manager [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1812.517850] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00a3812d-b0a4-4f83-b6cf-8356a4dc3f74 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.558274] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f8d8720d-b7e1-4df2-9eb2-8447361ba6a5 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "9fe592c1-e23a-46d5-8952-c181709d93e7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.524s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1812.653860] env[62510]: DEBUG oslo_concurrency.lockutils [req-9cfe5e6b-04d8-4a67-b172-8cb092f3fc6f req-02041cc4-8dc1-436e-a263-6745cd3dd739 service nova] Releasing lock "refresh_cache-313f7916-0737-4e44-ae2f-58301934bf06" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1812.724906] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769272, 'name': CreateVM_Task, 'duration_secs': 1.017508} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1812.725190] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1812.726031] env[62510]: DEBUG oslo_concurrency.lockutils [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1812.726164] env[62510]: DEBUG oslo_concurrency.lockutils [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1812.726570] env[62510]: DEBUG oslo_concurrency.lockutils [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1812.726898] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49e17207-b686-43a1-9bc2-55297c6914d9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.735220] env[62510]: DEBUG oslo_vmware.api [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1812.735220] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52ca43db-7266-2679-0ef8-77e74ca9cd2f" [ 1812.735220] env[62510]: _type = "Task" [ 1812.735220] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1812.748462] env[62510]: DEBUG oslo_vmware.api [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52ca43db-7266-2679-0ef8-77e74ca9cd2f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.873452] env[62510]: DEBUG oslo_concurrency.lockutils [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 4.159s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1812.876401] env[62510]: DEBUG oslo_concurrency.lockutils [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.268s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1812.878525] env[62510]: INFO nova.compute.claims [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1812.899016] env[62510]: INFO nova.scheduler.client.report [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Deleted allocations for instance dabc046f-10f5-43d8-90f8-507dcb4d0144 [ 1812.985947] env[62510]: DEBUG oslo_vmware.api [None req-d9a3169f-07d0-46bb-9449-1c81a3e49604 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Task: {'id': task-1769274, 'name': PowerOffVM_Task, 'duration_secs': 0.308368} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1812.985947] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9a3169f-07d0-46bb-9449-1c81a3e49604 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1812.986162] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d9a3169f-07d0-46bb-9449-1c81a3e49604 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1812.987078] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e46226f0-3e57-4d6c-ab31-8d2eb99f27d6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.039785] env[62510]: DEBUG oslo_concurrency.lockutils [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1813.068359] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d9a3169f-07d0-46bb-9449-1c81a3e49604 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1813.068761] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d9a3169f-07d0-46bb-9449-1c81a3e49604 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1813.068881] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9a3169f-07d0-46bb-9449-1c81a3e49604 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Deleting the datastore file [datastore1] c8e69231-2786-47ac-9a44-c194088b8079 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1813.069106] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-646840ba-217c-4bc2-b2b8-4f8495246594 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.076861] env[62510]: DEBUG oslo_vmware.api [None req-d9a3169f-07d0-46bb-9449-1c81a3e49604 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Waiting for the task: (returnval){ [ 1813.076861] env[62510]: value = "task-1769276" [ 1813.076861] env[62510]: _type = "Task" [ 1813.076861] env[62510]: } to complete. 
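The unregister/delete sequence above is the usual VMware teardown path: power the VM off, UnregisterVM, then remove its directory from the datastore through FileManager. A condensed, illustrative version against an oslo.vmware session; the helper and its argument handling are assumptions, and Nova's vmops/ds_util wrap each step in considerably more error handling:

    # Condensed sketch of the teardown sequence logged above
    # (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task). Assumes an
    # existing oslo_vmware.api.VMwareAPISession; not a drop-in for Nova's code.
    def destroy_vm(session, vm_ref, dc_ref, ds_path):
        """Power off and unregister a VM, then delete its datastore folder.

        ds_path is a datastore path string such as
        '[datastore1] c8e69231-2786-47ac-9a44-c194088b8079'.
        """
        vim = session.vim
        # "Powering off the VM"
        session.wait_for_task(session.invoke_api(vim, 'PowerOffVM_Task', vm_ref))
        # "Unregistering the VM"; UnregisterVM is not a task, so nothing to poll.
        session.invoke_api(vim, 'UnregisterVM', vm_ref)
        # "Deleting the datastore file" via the FileManager managed object.
        file_manager = vim.service_content.fileManager
        task = session.invoke_api(vim, 'DeleteDatastoreFile_Task', file_manager,
                                  name=ds_path, datacenter=dc_ref)
        session.wait_for_task(task)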
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.086445] env[62510]: DEBUG oslo_vmware.api [None req-d9a3169f-07d0-46bb-9449-1c81a3e49604 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Task: {'id': task-1769276, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.249376] env[62510]: DEBUG oslo_vmware.api [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52ca43db-7266-2679-0ef8-77e74ca9cd2f, 'name': SearchDatastore_Task, 'duration_secs': 0.013726} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1813.252292] env[62510]: DEBUG oslo_concurrency.lockutils [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1813.252538] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1813.252802] env[62510]: DEBUG oslo_concurrency.lockutils [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1813.252928] env[62510]: DEBUG oslo_concurrency.lockutils [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1813.253121] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1813.253882] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-23d07000-2993-43f5-8648-8aa8cb83d5f4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.275630] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1813.275851] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1813.276662] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-626b0f08-6fa3-41ee-abf8-06d9b74b0329 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.286633] env[62510]: DEBUG oslo_vmware.api [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1813.286633] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]525ed02a-8d0c-d4a2-47a1-89da2324a889" [ 1813.286633] env[62510]: _type = "Task" [ 1813.286633] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.297160] env[62510]: DEBUG oslo_vmware.api [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]525ed02a-8d0c-d4a2-47a1-89da2324a889, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.398022] env[62510]: DEBUG nova.network.neutron [None req-bb14168c-19ff-47a0-8da9-80cdb04852ac tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Updating instance_info_cache with network_info: [{"id": "d3047f95-b766-4344-bc0c-ad2f1b9f55fd", "address": "fa:16:3e:97:f9:1a", "network": {"id": "3b8d6085-89b4-4ce1-b2d3-a23177f0eb79", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-951886226-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de0d125bba6242d3b9614402098efc1f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3047f95-b7", "ovs_interfaceid": "d3047f95-b766-4344-bc0c-ad2f1b9f55fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1813.414445] env[62510]: DEBUG oslo_concurrency.lockutils [None req-34423eb1-902e-411d-b69d-49df49b27ae7 tempest-ServerMetadataNegativeTestJSON-1784127763 tempest-ServerMetadataNegativeTestJSON-1784127763-project-member] Lock "dabc046f-10f5-43d8-90f8-507dcb4d0144" "released" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.040s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1813.520399] env[62510]: DEBUG oslo_vmware.rw_handles [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f4ee2a-794c-9db9-2879-37c3a97e31e2/disk-0.vmdk. {{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1813.521613] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1741f7c4-97f3-4b73-a038-60ad436a9bde {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.539838] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e9008972-1e68-4243-a5b9-7d86a9f52971 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "refresh_cache-241d842d-3dd5-4ac2-a18a-12b9c9fbd340" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1813.539838] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e9008972-1e68-4243-a5b9-7d86a9f52971 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquired lock "refresh_cache-241d842d-3dd5-4ac2-a18a-12b9c9fbd340" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1813.539838] env[62510]: DEBUG nova.network.neutron [None req-e9008972-1e68-4243-a5b9-7d86a9f52971 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1813.545273] env[62510]: DEBUG oslo_vmware.rw_handles [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f4ee2a-794c-9db9-2879-37c3a97e31e2/disk-0.vmdk is in state: ready. {{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1813.545496] env[62510]: ERROR oslo_vmware.rw_handles [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f4ee2a-794c-9db9-2879-37c3a97e31e2/disk-0.vmdk due to incomplete transfer. [ 1813.546300] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-92a4c02a-6133-46af-a947-eb529afc6c3f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.560345] env[62510]: DEBUG oslo_vmware.rw_handles [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f4ee2a-794c-9db9-2879-37c3a97e31e2/disk-0.vmdk. 
{{(pid=62510) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1813.560574] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Uploaded image 2bd1ac76-8500-47cd-b52c-8213b9025fc0 to the Glance image server {{(pid=62510) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1813.563014] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Destroying the VM {{(pid=62510) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1813.563961] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-30ca4da5-cecf-4957-a93d-3095f92aa62d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.572715] env[62510]: DEBUG oslo_vmware.api [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 1813.572715] env[62510]: value = "task-1769277" [ 1813.572715] env[62510]: _type = "Task" [ 1813.572715] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.584947] env[62510]: DEBUG oslo_vmware.api [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769277, 'name': Destroy_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.591168] env[62510]: DEBUG oslo_vmware.api [None req-d9a3169f-07d0-46bb-9449-1c81a3e49604 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Task: {'id': task-1769276, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.272713} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1813.591386] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9a3169f-07d0-46bb-9449-1c81a3e49604 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1813.591763] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d9a3169f-07d0-46bb-9449-1c81a3e49604 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1813.591763] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d9a3169f-07d0-46bb-9449-1c81a3e49604 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1813.592030] env[62510]: INFO nova.compute.manager [None req-d9a3169f-07d0-46bb-9449-1c81a3e49604 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1813.592413] env[62510]: DEBUG oslo.service.loopingcall [None req-d9a3169f-07d0-46bb-9449-1c81a3e49604 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1813.592578] env[62510]: DEBUG nova.compute.manager [-] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1813.592682] env[62510]: DEBUG nova.network.neutron [-] [instance: c8e69231-2786-47ac-9a44-c194088b8079] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1813.630901] env[62510]: DEBUG nova.compute.manager [None req-fedbab3f-d0a3-4b71-a4c6-d31b6621912f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1813.631878] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dcabcae-b206-4d80-a11e-0c34136a32c1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.640520] env[62510]: DEBUG nova.compute.manager [req-bb34aa4d-fd36-4da4-86c4-a2d73370833b req-19f8594c-3ade-4a17-a184-9ee592167461 service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Received event network-vif-deleted-19ad8f3d-cc51-441e-862f-31fabe6277ae {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1813.640520] env[62510]: INFO nova.compute.manager [req-bb34aa4d-fd36-4da4-86c4-a2d73370833b req-19f8594c-3ade-4a17-a184-9ee592167461 service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Neutron deleted interface 19ad8f3d-cc51-441e-862f-31fabe6277ae; detaching it from the instance and deleting it from the info cache [ 1813.640520] env[62510]: DEBUG nova.network.neutron [req-bb34aa4d-fd36-4da4-86c4-a2d73370833b req-19f8594c-3ade-4a17-a184-9ee592167461 service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Updating instance_info_cache with network_info: [{"id": "e0d0d69b-8e64-4722-b7d5-837e5c7482bc", "address": "fa:16:3e:2b:3e:7e", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0d0d69b-8e", "ovs_interfaceid": "e0d0d69b-8e64-4722-b7d5-837e5c7482bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1bc2d7ec-858c-45a9-8966-8c35ee7ef110", "address": "fa:16:3e:49:27:30", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": 
"tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1bc2d7ec-85", "ovs_interfaceid": "1bc2d7ec-858c-45a9-8966-8c35ee7ef110", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1813.736920] env[62510]: DEBUG nova.compute.manager [req-b3630f8f-e98d-4535-bf8f-455052c28bff req-aeeb4aee-82d2-456b-9312-0d1ef32b3895 service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Received event network-vif-deleted-1bc2d7ec-858c-45a9-8966-8c35ee7ef110 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1813.737305] env[62510]: INFO nova.compute.manager [req-b3630f8f-e98d-4535-bf8f-455052c28bff req-aeeb4aee-82d2-456b-9312-0d1ef32b3895 service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Neutron deleted interface 1bc2d7ec-858c-45a9-8966-8c35ee7ef110; detaching it from the instance and deleting it from the info cache [ 1813.737654] env[62510]: DEBUG nova.network.neutron [req-b3630f8f-e98d-4535-bf8f-455052c28bff req-aeeb4aee-82d2-456b-9312-0d1ef32b3895 service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Updating instance_info_cache with network_info: [{"id": "e0d0d69b-8e64-4722-b7d5-837e5c7482bc", "address": "fa:16:3e:2b:3e:7e", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0d0d69b-8e", "ovs_interfaceid": "e0d0d69b-8e64-4722-b7d5-837e5c7482bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1813.796707] env[62510]: DEBUG oslo_vmware.api [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 
tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]525ed02a-8d0c-d4a2-47a1-89da2324a889, 'name': SearchDatastore_Task, 'duration_secs': 0.017556} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1813.797566] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91281c34-cfd1-4779-9859-d575b05b7337 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.806366] env[62510]: DEBUG oslo_vmware.api [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1813.806366] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52349da8-0f28-fb45-dc6e-29b550703108" [ 1813.806366] env[62510]: _type = "Task" [ 1813.806366] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.824941] env[62510]: DEBUG oslo_vmware.api [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52349da8-0f28-fb45-dc6e-29b550703108, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.898034] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bb14168c-19ff-47a0-8da9-80cdb04852ac tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Releasing lock "refresh_cache-2f7b02e8-f658-448f-b6e6-9bfa94c74da4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1813.898882] env[62510]: DEBUG nova.objects.instance [None req-bb14168c-19ff-47a0-8da9-80cdb04852ac tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lazy-loading 'flavor' on Instance uuid 2f7b02e8-f658-448f-b6e6-9bfa94c74da4 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1814.038875] env[62510]: DEBUG oslo_concurrency.lockutils [None req-546d308d-e331-408c-99ff-364b34efab00 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Acquiring lock "1e3e2044-a072-454f-85ba-5cb0bc36b5fd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1814.039120] env[62510]: DEBUG oslo_concurrency.lockutils [None req-546d308d-e331-408c-99ff-364b34efab00 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Lock "1e3e2044-a072-454f-85ba-5cb0bc36b5fd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1814.039263] env[62510]: DEBUG oslo_concurrency.lockutils [None req-546d308d-e331-408c-99ff-364b34efab00 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Acquiring lock "1e3e2044-a072-454f-85ba-5cb0bc36b5fd-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1814.039456] env[62510]: DEBUG oslo_concurrency.lockutils [None req-546d308d-e331-408c-99ff-364b34efab00 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Lock "1e3e2044-a072-454f-85ba-5cb0bc36b5fd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1814.039618] env[62510]: DEBUG oslo_concurrency.lockutils [None req-546d308d-e331-408c-99ff-364b34efab00 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Lock "1e3e2044-a072-454f-85ba-5cb0bc36b5fd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1814.041785] env[62510]: INFO nova.compute.manager [None req-546d308d-e331-408c-99ff-364b34efab00 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Terminating instance [ 1814.083817] env[62510]: DEBUG oslo_vmware.api [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769277, 'name': Destroy_Task, 'duration_secs': 0.431821} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.084117] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Destroyed the VM [ 1814.084351] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Deleting Snapshot of the VM instance {{(pid=62510) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1814.084598] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-52741db5-2598-4d3e-84df-250ef2e84078 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.092165] env[62510]: DEBUG oslo_vmware.api [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 1814.092165] env[62510]: value = "task-1769278" [ 1814.092165] env[62510]: _type = "Task" [ 1814.092165] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.101312] env[62510]: DEBUG oslo_vmware.api [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769278, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.141326] env[62510]: DEBUG oslo_concurrency.lockutils [req-bb34aa4d-fd36-4da4-86c4-a2d73370833b req-19f8594c-3ade-4a17-a184-9ee592167461 service nova] Acquiring lock "241d842d-3dd5-4ac2-a18a-12b9c9fbd340" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1814.141697] env[62510]: DEBUG oslo_concurrency.lockutils [req-bb34aa4d-fd36-4da4-86c4-a2d73370833b req-19f8594c-3ade-4a17-a184-9ee592167461 service nova] Acquired lock "241d842d-3dd5-4ac2-a18a-12b9c9fbd340" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1814.143373] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a1b7018-3c91-4db5-a348-02f085e5ce70 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.153183] env[62510]: INFO nova.compute.manager [None req-fedbab3f-d0a3-4b71-a4c6-d31b6621912f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] instance snapshotting [ 1814.171606] env[62510]: DEBUG oslo_concurrency.lockutils [req-bb34aa4d-fd36-4da4-86c4-a2d73370833b req-19f8594c-3ade-4a17-a184-9ee592167461 service nova] Releasing lock "241d842d-3dd5-4ac2-a18a-12b9c9fbd340" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1814.171892] env[62510]: WARNING nova.compute.manager [req-bb34aa4d-fd36-4da4-86c4-a2d73370833b req-19f8594c-3ade-4a17-a184-9ee592167461 service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Detach interface failed, port_id=19ad8f3d-cc51-441e-862f-31fabe6277ae, reason: No device with interface-id 19ad8f3d-cc51-441e-862f-31fabe6277ae exists on VM: nova.exception.NotFound: No device with interface-id 19ad8f3d-cc51-441e-862f-31fabe6277ae exists on VM [ 1814.176474] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bc8e917-78e5-439d-9314-5bc1478c10c5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.200644] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-102d6f3d-f44a-4ce5-b5cb-49b1b8f9cc26 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.243233] env[62510]: DEBUG oslo_concurrency.lockutils [req-b3630f8f-e98d-4535-bf8f-455052c28bff req-aeeb4aee-82d2-456b-9312-0d1ef32b3895 service nova] Acquiring lock "241d842d-3dd5-4ac2-a18a-12b9c9fbd340" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1814.244083] env[62510]: DEBUG oslo_concurrency.lockutils [req-b3630f8f-e98d-4535-bf8f-455052c28bff req-aeeb4aee-82d2-456b-9312-0d1ef32b3895 service nova] Acquired lock "241d842d-3dd5-4ac2-a18a-12b9c9fbd340" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1814.245665] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-589a4954-3c61-4a27-aa4d-ed6a5c6cccee {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.269567] env[62510]: DEBUG oslo_concurrency.lockutils [None 
req-adebd63f-8aef-4dfe-a3d9-abd87a7f08d4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "241d842d-3dd5-4ac2-a18a-12b9c9fbd340" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1814.276097] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a369c756-9a47-4c7e-8e8f-e66ec718dd3a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.309456] env[62510]: DEBUG nova.virt.vmwareapi.vmops [req-b3630f8f-e98d-4535-bf8f-455052c28bff req-aeeb4aee-82d2-456b-9312-0d1ef32b3895 service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Reconfiguring VM to detach interface {{(pid=62510) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1814.314624] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f91aae83-7456-4670-93dd-c6edb8800f84 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.341939] env[62510]: DEBUG oslo_vmware.api [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52349da8-0f28-fb45-dc6e-29b550703108, 'name': SearchDatastore_Task, 'duration_secs': 0.03226} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.345930] env[62510]: DEBUG oslo_concurrency.lockutils [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1814.346225] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 313f7916-0737-4e44-ae2f-58301934bf06/313f7916-0737-4e44-ae2f-58301934bf06.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1814.346541] env[62510]: DEBUG oslo_vmware.api [req-b3630f8f-e98d-4535-bf8f-455052c28bff req-aeeb4aee-82d2-456b-9312-0d1ef32b3895 service nova] Waiting for the task: (returnval){ [ 1814.346541] env[62510]: value = "task-1769279" [ 1814.346541] env[62510]: _type = "Task" [ 1814.346541] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.346953] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d8fd4015-236a-4b1d-8295-62f5d3ff67c4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.358956] env[62510]: DEBUG oslo_vmware.api [req-b3630f8f-e98d-4535-bf8f-455052c28bff req-aeeb4aee-82d2-456b-9312-0d1ef32b3895 service nova] Task: {'id': task-1769279, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.363054] env[62510]: DEBUG oslo_vmware.api [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1814.363054] env[62510]: value = "task-1769280" [ 1814.363054] env[62510]: _type = "Task" [ 1814.363054] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.376295] env[62510]: DEBUG oslo_vmware.api [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769280, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.378666] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd4e163a-9614-4b8c-87a4-26f633e8a517 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.389053] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c07e4adc-de52-407b-b798-61ae1b9e1512 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.427248] env[62510]: INFO nova.network.neutron [None req-e9008972-1e68-4243-a5b9-7d86a9f52971 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Port 19ad8f3d-cc51-441e-862f-31fabe6277ae from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1814.427248] env[62510]: INFO nova.network.neutron [None req-e9008972-1e68-4243-a5b9-7d86a9f52971 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Port 1bc2d7ec-858c-45a9-8966-8c35ee7ef110 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1814.427248] env[62510]: DEBUG nova.network.neutron [None req-e9008972-1e68-4243-a5b9-7d86a9f52971 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Updating instance_info_cache with network_info: [{"id": "e0d0d69b-8e64-4722-b7d5-837e5c7482bc", "address": "fa:16:3e:2b:3e:7e", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0d0d69b-8e", "ovs_interfaceid": "e0d0d69b-8e64-4722-b7d5-837e5c7482bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1814.428868] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d16aa808-902d-44c8-b39d-58bf99e6bb16 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.433636] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e16c594-4704-4daa-9f22-99fa67b22064 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.461588] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-910b561a-df00-4826-85f9-d3a72a0f6290 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.466541] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb14168c-19ff-47a0-8da9-80cdb04852ac tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1814.466837] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3f662a58-1733-40e5-bd73-ef7083dbb27e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.480304] env[62510]: DEBUG nova.compute.provider_tree [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': 
{'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1814.483297] env[62510]: DEBUG oslo_vmware.api [None req-bb14168c-19ff-47a0-8da9-80cdb04852ac tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1814.483297] env[62510]: value = "task-1769281" [ 1814.483297] env[62510]: _type = "Task" [ 1814.483297] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.493599] env[62510]: DEBUG oslo_vmware.api [None req-bb14168c-19ff-47a0-8da9-80cdb04852ac tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769281, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.545347] env[62510]: DEBUG oslo_concurrency.lockutils [None req-546d308d-e331-408c-99ff-364b34efab00 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Acquiring lock "refresh_cache-1e3e2044-a072-454f-85ba-5cb0bc36b5fd" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1814.545544] env[62510]: DEBUG oslo_concurrency.lockutils [None req-546d308d-e331-408c-99ff-364b34efab00 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Acquired lock "refresh_cache-1e3e2044-a072-454f-85ba-5cb0bc36b5fd" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1814.545738] env[62510]: DEBUG nova.network.neutron [None req-546d308d-e331-408c-99ff-364b34efab00 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1814.608971] env[62510]: DEBUG oslo_vmware.api [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769278, 'name': RemoveSnapshot_Task} progress is 17%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.714613] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-fedbab3f-d0a3-4b71-a4c6-d31b6621912f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Creating Snapshot of the VM instance {{(pid=62510) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1814.714613] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-34d480f5-52b6-4b04-8d83-1e5b4ed25412 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.724065] env[62510]: DEBUG oslo_vmware.api [None req-fedbab3f-d0a3-4b71-a4c6-d31b6621912f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1814.724065] env[62510]: value = "task-1769282" [ 1814.724065] env[62510]: _type = "Task" [ 1814.724065] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.734023] env[62510]: DEBUG oslo_vmware.api [None req-fedbab3f-d0a3-4b71-a4c6-d31b6621912f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769282, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.859898] env[62510]: DEBUG oslo_vmware.api [req-b3630f8f-e98d-4535-bf8f-455052c28bff req-aeeb4aee-82d2-456b-9312-0d1ef32b3895 service nova] Task: {'id': task-1769279, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.873404] env[62510]: DEBUG oslo_vmware.api [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769280, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.939983] env[62510]: DEBUG nova.network.neutron [-] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1814.941847] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e9008972-1e68-4243-a5b9-7d86a9f52971 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Releasing lock "refresh_cache-241d842d-3dd5-4ac2-a18a-12b9c9fbd340" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1815.007069] env[62510]: DEBUG oslo_vmware.api [None req-bb14168c-19ff-47a0-8da9-80cdb04852ac tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769281, 'name': PowerOffVM_Task, 'duration_secs': 0.516668} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.007069] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb14168c-19ff-47a0-8da9-80cdb04852ac tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1815.012357] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb14168c-19ff-47a0-8da9-80cdb04852ac tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Reconfiguring VM instance instance-00000052 to detach disk 2001 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1815.012702] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-970047fe-f806-4f27-bda1-253743640dc7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.029759] env[62510]: DEBUG nova.scheduler.client.report [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Updated inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with generation 124 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1815.031804] env[62510]: DEBUG nova.compute.provider_tree [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Updating resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 generation from 124 to 125 during operation: update_inventory {{(pid=62510) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1815.032101] env[62510]: DEBUG nova.compute.provider_tree [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1815.037260] env[62510]: DEBUG oslo_vmware.api [None req-bb14168c-19ff-47a0-8da9-80cdb04852ac tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1815.037260] env[62510]: value = "task-1769283" [ 1815.037260] env[62510]: _type = "Task" [ 1815.037260] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.052215] env[62510]: DEBUG oslo_vmware.api [None req-bb14168c-19ff-47a0-8da9-80cdb04852ac tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769283, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.085923] env[62510]: DEBUG nova.network.neutron [None req-546d308d-e331-408c-99ff-364b34efab00 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1815.107661] env[62510]: DEBUG oslo_vmware.api [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769278, 'name': RemoveSnapshot_Task, 'duration_secs': 0.578497} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.107661] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Deleted Snapshot of the VM instance {{(pid=62510) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1815.107742] env[62510]: INFO nova.compute.manager [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Took 15.22 seconds to snapshot the instance on the hypervisor. [ 1815.167494] env[62510]: DEBUG nova.network.neutron [None req-546d308d-e331-408c-99ff-364b34efab00 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1815.235149] env[62510]: DEBUG oslo_vmware.api [None req-fedbab3f-d0a3-4b71-a4c6-d31b6621912f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769282, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.362800] env[62510]: DEBUG oslo_vmware.api [req-b3630f8f-e98d-4535-bf8f-455052c28bff req-aeeb4aee-82d2-456b-9312-0d1ef32b3895 service nova] Task: {'id': task-1769279, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.375418] env[62510]: DEBUG oslo_vmware.api [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769280, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.736941} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.375738] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 313f7916-0737-4e44-ae2f-58301934bf06/313f7916-0737-4e44-ae2f-58301934bf06.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1815.375929] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1815.376226] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d9b4516e-8522-48e4-a71a-39e314c5e39b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.383948] env[62510]: DEBUG oslo_vmware.api [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1815.383948] env[62510]: value = "task-1769284" [ 1815.383948] env[62510]: _type = "Task" [ 1815.383948] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.394882] env[62510]: DEBUG oslo_vmware.api [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769284, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.445597] env[62510]: INFO nova.compute.manager [-] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Took 1.85 seconds to deallocate network for instance. [ 1815.448255] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e9008972-1e68-4243-a5b9-7d86a9f52971 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "interface-241d842d-3dd5-4ac2-a18a-12b9c9fbd340-19ad8f3d-cc51-441e-862f-31fabe6277ae" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 13.351s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1815.539252] env[62510]: DEBUG oslo_concurrency.lockutils [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.663s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1815.539767] env[62510]: DEBUG nova.compute.manager [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1815.543020] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2811fd10-e394-48ed-bdd1-d242c883a569 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.677s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1815.543020] env[62510]: DEBUG nova.objects.instance [None req-2811fd10-e394-48ed-bdd1-d242c883a569 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Lazy-loading 'resources' on Instance uuid 9373089f-dbd4-4ac9-8736-e4c929fe6fb0 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1815.554710] env[62510]: DEBUG oslo_vmware.api [None req-bb14168c-19ff-47a0-8da9-80cdb04852ac tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769283, 'name': ReconfigVM_Task, 'duration_secs': 0.37642} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.555854] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb14168c-19ff-47a0-8da9-80cdb04852ac tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Reconfigured VM instance instance-00000052 to detach disk 2001 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1815.555937] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb14168c-19ff-47a0-8da9-80cdb04852ac tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1815.556220] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-04ecaa78-b69f-4310-9f01-5d3dd8247523 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.565176] env[62510]: DEBUG oslo_vmware.api [None req-bb14168c-19ff-47a0-8da9-80cdb04852ac tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1815.565176] env[62510]: value = "task-1769285" [ 1815.565176] env[62510]: _type = "Task" [ 1815.565176] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.575562] env[62510]: DEBUG oslo_vmware.api [None req-bb14168c-19ff-47a0-8da9-80cdb04852ac tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769285, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.667483] env[62510]: DEBUG nova.compute.manager [req-f2cad99a-f3c1-48a9-8b84-81d8a2ba6ac3 req-2a310e19-bcf1-48ec-bd37-3e9a4a5af6cd service nova] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Received event network-vif-deleted-db18d253-3621-4972-a6c9-1f82a650ccb0 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1815.670147] env[62510]: DEBUG oslo_concurrency.lockutils [None req-546d308d-e331-408c-99ff-364b34efab00 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Releasing lock "refresh_cache-1e3e2044-a072-454f-85ba-5cb0bc36b5fd" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1815.670534] env[62510]: DEBUG nova.compute.manager [None req-546d308d-e331-408c-99ff-364b34efab00 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1815.670721] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-546d308d-e331-408c-99ff-364b34efab00 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1815.671894] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a171dbec-d759-4f6b-bea5-3acab8547608 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.676951] env[62510]: DEBUG nova.compute.manager [None req-219a999a-89ce-4912-b739-bb16268f6cf9 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Found 1 images (rotation: 2) {{(pid=62510) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4924}} [ 1815.680341] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-546d308d-e331-408c-99ff-364b34efab00 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1815.681123] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-95606aaf-ea02-45de-bbe9-9fc675d36e69 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.691813] env[62510]: DEBUG oslo_vmware.api [None req-546d308d-e331-408c-99ff-364b34efab00 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Waiting for the task: (returnval){ [ 1815.691813] env[62510]: value = "task-1769286" [ 1815.691813] env[62510]: _type = "Task" [ 1815.691813] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.701698] env[62510]: DEBUG oslo_vmware.api [None req-546d308d-e331-408c-99ff-364b34efab00 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769286, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.735270] env[62510]: DEBUG oslo_vmware.api [None req-fedbab3f-d0a3-4b71-a4c6-d31b6621912f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769282, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.864646] env[62510]: DEBUG oslo_vmware.api [req-b3630f8f-e98d-4535-bf8f-455052c28bff req-aeeb4aee-82d2-456b-9312-0d1ef32b3895 service nova] Task: {'id': task-1769279, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.894422] env[62510]: DEBUG oslo_vmware.api [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769284, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077364} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.894706] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1815.896091] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fd861eb-69c4-45a6-840b-6b6a9646b7a6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.918701] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] 313f7916-0737-4e44-ae2f-58301934bf06/313f7916-0737-4e44-ae2f-58301934bf06.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1815.919018] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3598f169-fa82-453e-9ebd-0d6cdeef7b21 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.940512] env[62510]: DEBUG oslo_vmware.api [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1815.940512] env[62510]: value = "task-1769287" [ 1815.940512] env[62510]: _type = "Task" [ 1815.940512] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.956250] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d9a3169f-07d0-46bb-9449-1c81a3e49604 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1816.052330] env[62510]: DEBUG nova.compute.utils [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1816.053955] env[62510]: DEBUG nova.compute.manager [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1816.054205] env[62510]: DEBUG nova.network.neutron [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1816.076820] env[62510]: DEBUG oslo_vmware.api [None req-bb14168c-19ff-47a0-8da9-80cdb04852ac tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769285, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.097984] env[62510]: DEBUG nova.policy [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4cc966cb72fa41108733a0e93d79c410', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3109fa7889c64dfda2117d4cd58aa528', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1816.205889] env[62510]: DEBUG oslo_vmware.api [None req-546d308d-e331-408c-99ff-364b34efab00 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769286, 'name': PowerOffVM_Task, 'duration_secs': 0.250243} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.206257] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-546d308d-e331-408c-99ff-364b34efab00 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1816.206551] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-546d308d-e331-408c-99ff-364b34efab00 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1816.206923] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7f922ac5-280e-48af-88db-ed91304098d8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.238020] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-546d308d-e331-408c-99ff-364b34efab00 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1816.238298] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-546d308d-e331-408c-99ff-364b34efab00 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1816.238510] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-546d308d-e331-408c-99ff-364b34efab00 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Deleting the datastore file [datastore1] 1e3e2044-a072-454f-85ba-5cb0bc36b5fd {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1816.239443] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e906a943-bc34-4a59-873e-66080ae7859b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.244517] env[62510]: DEBUG oslo_vmware.api [None req-fedbab3f-d0a3-4b71-a4c6-d31b6621912f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769282, 'name': CreateSnapshot_Task, 'duration_secs': 1.208341} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.245404] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-fedbab3f-d0a3-4b71-a4c6-d31b6621912f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Created Snapshot of the VM instance {{(pid=62510) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1816.246512] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf3e01f7-2f3f-412b-b8c7-67f792db01be {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.254163] env[62510]: DEBUG oslo_vmware.api [None req-546d308d-e331-408c-99ff-364b34efab00 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Waiting for the task: (returnval){ [ 1816.254163] env[62510]: value = "task-1769289" [ 1816.254163] env[62510]: _type = "Task" [ 1816.254163] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.281796] env[62510]: DEBUG oslo_vmware.api [None req-546d308d-e331-408c-99ff-364b34efab00 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769289, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.363820] env[62510]: DEBUG oslo_vmware.api [req-b3630f8f-e98d-4535-bf8f-455052c28bff req-aeeb4aee-82d2-456b-9312-0d1ef32b3895 service nova] Task: {'id': task-1769279, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.433011] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae48151b-5787-428b-b2d9-57c6527e611e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.450520] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ecf3f9f-0d64-4a44-86b1-68b7bfea12fe {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.454468] env[62510]: DEBUG oslo_vmware.api [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769287, 'name': ReconfigVM_Task, 'duration_secs': 0.488522} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.454710] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Reconfigured VM instance instance-0000005b to attach disk [datastore1] 313f7916-0737-4e44-ae2f-58301934bf06/313f7916-0737-4e44-ae2f-58301934bf06.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1816.455723] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ebf95ab8-082b-46e2-a5fb-33e7fd13e612 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.488584] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fb2bda1-efdb-4554-85dd-273bd873af87 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.491752] env[62510]: DEBUG oslo_vmware.api [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1816.491752] env[62510]: value = "task-1769290" [ 1816.491752] env[62510]: _type = "Task" [ 1816.491752] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.507883] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aabc16f0-6079-446b-b6a7-d6d79ce99756 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.515514] env[62510]: DEBUG oslo_vmware.api [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769290, 'name': Rename_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.528285] env[62510]: DEBUG nova.compute.provider_tree [None req-2811fd10-e394-48ed-bdd1-d242c883a569 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1816.557332] env[62510]: DEBUG nova.compute.manager [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1816.577032] env[62510]: DEBUG oslo_vmware.api [None req-bb14168c-19ff-47a0-8da9-80cdb04852ac tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769285, 'name': PowerOnVM_Task, 'duration_secs': 0.632796} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.577032] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb14168c-19ff-47a0-8da9-80cdb04852ac tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1816.577032] env[62510]: DEBUG nova.compute.manager [None req-bb14168c-19ff-47a0-8da9-80cdb04852ac tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1816.577915] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33c98aa4-7377-4223-90b2-550e02633b12 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.640156] env[62510]: DEBUG nova.network.neutron [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Successfully created port: 022a0379-8a0f-412f-a55a-f8fcaf1102f3 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1816.773717] env[62510]: DEBUG oslo_vmware.api [None req-546d308d-e331-408c-99ff-364b34efab00 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769289, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.366944} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.774171] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-546d308d-e331-408c-99ff-364b34efab00 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1816.774750] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-546d308d-e331-408c-99ff-364b34efab00 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1816.775220] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-546d308d-e331-408c-99ff-364b34efab00 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1816.775689] env[62510]: INFO nova.compute.manager [None req-546d308d-e331-408c-99ff-364b34efab00 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Took 1.10 seconds to destroy the instance on the hypervisor. 
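Note: the ServerShowV247Test request has now completed the full hypervisor-side teardown traced across the last few entries: power off, UnregisterVM, and deletion of the instance directory from datastore1. A rough outline of that sequence, reusing the invoke/wait helpers from the sketch above; the names are placeholders, not Nova's _destroy_instance.

```python
# Rough outline (placeholder names) of the teardown just logged:
# power off -> UnregisterVM -> FileManager.DeleteDatastoreFile_Task.
def destroy_instance(session, vm_ref, datastore_path, datacenter_ref):
    """`session` is assumed to be an oslo_vmware.api.VMwareAPISession."""
    # Power the VM off; real code checks the power state first because
    # PowerOffVM_Task fails on a VM that is already powered off.
    session.wait_for_task(
        session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref))
    # UnregisterVM removes the VM from the vCenter inventory but keeps
    # its files on the datastore ("Unregistered the VM" in the log).
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)
    # Delete the leftover files, e.g. "[datastore1] <instance-uuid>"
    # ("Deleting the datastore file ..." / DeleteDatastoreFile_Task).
    file_manager = session.vim.service_content.fileManager
    session.wait_for_task(
        session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                           file_manager, name=datastore_path,
                           datacenter=datacenter_ref))
```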
[ 1816.776390] env[62510]: DEBUG oslo.service.loopingcall [None req-546d308d-e331-408c-99ff-364b34efab00 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1816.776761] env[62510]: DEBUG nova.compute.manager [-] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1816.776953] env[62510]: DEBUG nova.network.neutron [-] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1816.789250] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-fedbab3f-d0a3-4b71-a4c6-d31b6621912f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Creating linked-clone VM from snapshot {{(pid=62510) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1816.789669] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-dda250c9-1732-416a-aa17-d7c2d145e819 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.799862] env[62510]: DEBUG oslo_vmware.api [None req-fedbab3f-d0a3-4b71-a4c6-d31b6621912f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1816.799862] env[62510]: value = "task-1769291" [ 1816.799862] env[62510]: _type = "Task" [ 1816.799862] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.810360] env[62510]: DEBUG oslo_vmware.api [None req-fedbab3f-d0a3-4b71-a4c6-d31b6621912f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769291, 'name': CloneVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.811351] env[62510]: DEBUG nova.network.neutron [-] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1816.864172] env[62510]: DEBUG oslo_vmware.api [req-b3630f8f-e98d-4535-bf8f-455052c28bff req-aeeb4aee-82d2-456b-9312-0d1ef32b3895 service nova] Task: {'id': task-1769279, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.006011] env[62510]: DEBUG oslo_vmware.api [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769290, 'name': Rename_Task, 'duration_secs': 0.28965} completed successfully. 
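Note: the `oslo.service.loopingcall` entry above ("Waiting for function ..._deallocate_network_with_retries to return") reflects the retry loop wrapped around network deallocation. A toy version of that looping-call pattern, with placeholder names and retry limits:

```python
# Toy version of the retry loop implied by the loopingcall DEBUG line:
# FixedIntervalLoopingCall re-invokes the function until it raises
# LoopingCallDone (success) or the retry budget is exhausted.
from oslo_service import loopingcall


def deallocate_with_retries(deallocate, max_attempts=3, interval=2):
    attempts = {'count': 0}

    def _try_once():
        attempts['count'] += 1
        try:
            deallocate()
        except Exception:
            if attempts['count'] >= max_attempts:
                raise                        # give up; wait() re-raises
            return                           # retry on the next interval
        raise loopingcall.LoopingCallDone()  # success: stop the loop

    timer = loopingcall.FixedIntervalLoopingCall(_try_once)
    timer.start(interval=interval).wait()


if __name__ == '__main__':
    deallocate_with_retries(lambda: None)
```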
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1817.006409] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1817.006746] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0f3cf769-b0dd-4dcb-a771-d355f4417441 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.015136] env[62510]: DEBUG oslo_vmware.api [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1817.015136] env[62510]: value = "task-1769292" [ 1817.015136] env[62510]: _type = "Task" [ 1817.015136] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1817.030657] env[62510]: DEBUG oslo_vmware.api [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769292, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.032915] env[62510]: DEBUG nova.scheduler.client.report [None req-2811fd10-e394-48ed-bdd1-d242c883a569 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1817.317380] env[62510]: DEBUG nova.network.neutron [-] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1817.317380] env[62510]: DEBUG oslo_vmware.api [None req-fedbab3f-d0a3-4b71-a4c6-d31b6621912f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769291, 'name': CloneVM_Task} progress is 94%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.365041] env[62510]: DEBUG oslo_vmware.api [req-b3630f8f-e98d-4535-bf8f-455052c28bff req-aeeb4aee-82d2-456b-9312-0d1ef32b3895 service nova] Task: {'id': task-1769279, 'name': ReconfigVM_Task} progress is 14%. 
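Note: the scheduler report entry above dumps the provider's inventory records. Placement treats the allocatable amount of each resource class as (total - reserved) × allocation_ratio, which for the values shown works out to 192 VCPU, 196078 MB of RAM and 400 GB of disk. A quick check of that arithmetic:

```python
# Worked example: allocatable capacity implied by the inventory data
# logged above for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {capacity:g} allocatable")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
```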
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.541205] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2811fd10-e394-48ed-bdd1-d242c883a569 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.996s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1817.546758] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.064s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1817.550551] env[62510]: INFO nova.compute.claims [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1817.553383] env[62510]: DEBUG nova.compute.manager [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1817.553916] env[62510]: DEBUG oslo_vmware.api [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769292, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.558619] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05fa7a34-3b3f-4f67-bd4c-66b49e8359a3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.574742] env[62510]: DEBUG nova.compute.manager [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1817.578697] env[62510]: INFO nova.scheduler.client.report [None req-2811fd10-e394-48ed-bdd1-d242c883a569 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Deleted allocations for instance 9373089f-dbd4-4ac9-8736-e4c929fe6fb0 [ 1817.622273] env[62510]: DEBUG nova.virt.hardware [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1817.622599] env[62510]: DEBUG nova.virt.hardware [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1817.622819] env[62510]: DEBUG nova.virt.hardware [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1817.623138] env[62510]: DEBUG nova.virt.hardware [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1817.624726] env[62510]: DEBUG nova.virt.hardware [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1817.624726] env[62510]: DEBUG nova.virt.hardware [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1817.624726] env[62510]: DEBUG nova.virt.hardware [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1817.624726] 
env[62510]: DEBUG nova.virt.hardware [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1817.624726] env[62510]: DEBUG nova.virt.hardware [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1817.624726] env[62510]: DEBUG nova.virt.hardware [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1817.624726] env[62510]: DEBUG nova.virt.hardware [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1817.626563] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-746c7081-4477-42c9-b65d-5378de5c9e13 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.640018] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-757d1225-e60b-47f1-826c-273b9cd38b16 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.815368] env[62510]: DEBUG oslo_vmware.api [None req-fedbab3f-d0a3-4b71-a4c6-d31b6621912f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769291, 'name': CloneVM_Task} progress is 94%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.818920] env[62510]: INFO nova.compute.manager [-] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Took 1.04 seconds to deallocate network for instance. [ 1817.867570] env[62510]: DEBUG oslo_vmware.api [req-b3630f8f-e98d-4535-bf8f-455052c28bff req-aeeb4aee-82d2-456b-9312-0d1ef32b3895 service nova] Task: {'id': task-1769279, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.028767] env[62510]: DEBUG oslo_vmware.api [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769292, 'name': PowerOnVM_Task, 'duration_secs': 0.545738} completed successfully. 
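Note: the nova.virt.hardware entries above go from flavor/image limits (all effectively unbounded at 65536) to the single possible topology for one vCPU. The toy re-derivation below is not Nova's actual algorithm, but it reproduces the 1:1:1 result from the log by enumerating the (sockets, cores, threads) triples whose product equals the vCPU count.

```python
# Toy enumeration of CPU topologies matching the log's
# "Build topologies for 1 vcpu(s) 1:1:1" / "Got 1 possible topologies".
import itertools


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    for sockets, cores, threads in itertools.product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            yield (sockets, cores, threads)


print(list(possible_topologies(1)))  # [(1, 1, 1)]
```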
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1818.029057] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1818.029291] env[62510]: INFO nova.compute.manager [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Took 9.37 seconds to spawn the instance on the hypervisor. [ 1818.029459] env[62510]: DEBUG nova.compute.manager [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1818.030240] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1b8cd34-8ada-45db-b67a-00757d78f5ed {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.070585] env[62510]: INFO nova.compute.manager [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] instance snapshotting [ 1818.071617] env[62510]: DEBUG nova.objects.instance [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lazy-loading 'flavor' on Instance uuid 841460b0-d917-44ea-88c6-0e5a3022f658 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1818.086010] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2811fd10-e394-48ed-bdd1-d242c883a569 tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Lock "9373089f-dbd4-4ac9-8736-e4c929fe6fb0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.152s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1818.318662] env[62510]: DEBUG oslo_vmware.api [None req-fedbab3f-d0a3-4b71-a4c6-d31b6621912f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769291, 'name': CloneVM_Task} progress is 95%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.331166] env[62510]: DEBUG oslo_concurrency.lockutils [None req-546d308d-e331-408c-99ff-364b34efab00 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1818.368552] env[62510]: DEBUG oslo_vmware.api [req-b3630f8f-e98d-4535-bf8f-455052c28bff req-aeeb4aee-82d2-456b-9312-0d1ef32b3895 service nova] Task: {'id': task-1769279, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.552744] env[62510]: INFO nova.compute.manager [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Took 35.11 seconds to build instance. [ 1818.582050] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5020833-22f9-4df3-a882-08ac9526614d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.611108] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0923fd50-0b34-4107-a427-8a4fec0c3cfe {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.825022] env[62510]: DEBUG oslo_vmware.api [None req-fedbab3f-d0a3-4b71-a4c6-d31b6621912f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769291, 'name': CloneVM_Task, 'duration_secs': 1.996699} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1818.825022] env[62510]: INFO nova.virt.vmwareapi.vmops [None req-fedbab3f-d0a3-4b71-a4c6-d31b6621912f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Created linked-clone VM from snapshot [ 1818.825022] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6073487-87c4-404d-827b-19af9b883a3e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.838536] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-fedbab3f-d0a3-4b71-a4c6-d31b6621912f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Uploading image 643a286d-0601-4c40-b1cb-8a3927f8e2fa {{(pid=62510) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1818.864516] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-fedbab3f-d0a3-4b71-a4c6-d31b6621912f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Destroying the VM {{(pid=62510) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1818.864937] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-7abf33a4-1aa2-4ca4-b877-98b8a47724db {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.878859] env[62510]: DEBUG nova.compute.manager [req-128130d0-8f3f-4160-b301-1f3e6be1aa56 req-d2a3f463-325c-46f3-a1bf-bc5980b1c376 service nova] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Received event network-vif-plugged-022a0379-8a0f-412f-a55a-f8fcaf1102f3 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1818.879133] env[62510]: DEBUG oslo_concurrency.lockutils [req-128130d0-8f3f-4160-b301-1f3e6be1aa56 req-d2a3f463-325c-46f3-a1bf-bc5980b1c376 service nova] Acquiring lock "cf4160a8-1160-45fc-b9e5-e9526b6c1506-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1818.879417] env[62510]: DEBUG oslo_concurrency.lockutils [req-128130d0-8f3f-4160-b301-1f3e6be1aa56 req-d2a3f463-325c-46f3-a1bf-bc5980b1c376 service nova] Lock "cf4160a8-1160-45fc-b9e5-e9526b6c1506-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1818.879684] env[62510]: DEBUG oslo_concurrency.lockutils [req-128130d0-8f3f-4160-b301-1f3e6be1aa56 req-d2a3f463-325c-46f3-a1bf-bc5980b1c376 service nova] Lock "cf4160a8-1160-45fc-b9e5-e9526b6c1506-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1818.879867] env[62510]: DEBUG nova.compute.manager [req-128130d0-8f3f-4160-b301-1f3e6be1aa56 req-d2a3f463-325c-46f3-a1bf-bc5980b1c376 service nova] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] No waiting events found dispatching network-vif-plugged-022a0379-8a0f-412f-a55a-f8fcaf1102f3 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1818.880053] env[62510]: WARNING nova.compute.manager [req-128130d0-8f3f-4160-b301-1f3e6be1aa56 req-d2a3f463-325c-46f3-a1bf-bc5980b1c376 service nova] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Received unexpected event network-vif-plugged-022a0379-8a0f-412f-a55a-f8fcaf1102f3 for instance with vm_state building and task_state spawning. [ 1818.880793] env[62510]: DEBUG oslo_vmware.api [None req-fedbab3f-d0a3-4b71-a4c6-d31b6621912f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1818.880793] env[62510]: value = "task-1769293" [ 1818.880793] env[62510]: _type = "Task" [ 1818.880793] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1818.881473] env[62510]: DEBUG oslo_vmware.api [req-b3630f8f-e98d-4535-bf8f-455052c28bff req-aeeb4aee-82d2-456b-9312-0d1ef32b3895 service nova] Task: {'id': task-1769279, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.893127] env[62510]: DEBUG oslo_vmware.api [None req-fedbab3f-d0a3-4b71-a4c6-d31b6621912f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769293, 'name': Destroy_Task} progress is 0%. 
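Note: the req-128130d0 entries show Neutron's network-vif-plugged callback arriving while nothing is registered to wait for it, hence "No waiting events found" and the WARNING about an unexpected event for an instance that is still building. A simplified sketch of the register-then-dispatch table behind those messages; it is illustrative only, not nova.compute.manager.InstanceEvents.

```python
# Simplified sketch of the per-instance event table behind the
# network-vif-plugged dispatch lines above.
import threading
from collections import defaultdict


class InstanceEventsSketch(object):
    def __init__(self):
        self._events = defaultdict(dict)   # instance_uuid -> {name: Event}
        self._lock = threading.Lock()

    def prepare(self, instance_uuid, name):
        """A waiter registers interest *before* starting the operation."""
        with self._lock:
            return self._events[instance_uuid].setdefault(
                name, threading.Event())

    def dispatch(self, instance_uuid, name):
        """The external-event handler pops and signals the waiter, if any."""
        with self._lock:
            event = self._events[instance_uuid].pop(name, None)
        if event is None:
            # Mirrors "No waiting events found dispatching ..." and the
            # "Received unexpected event ..." WARNING in the log.
            print(f'unexpected event {name} for {instance_uuid}')
            return
        event.set()


if __name__ == '__main__':
    events = InstanceEventsSketch()
    # Event arrives before anyone registered for it -> "unexpected".
    events.dispatch('cf4160a8-1160-45fc-b9e5-e9526b6c1506',
                    'network-vif-plugged-022a0379-8a0f-412f-a55a-f8fcaf1102f3')
```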
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.907206] env[62510]: DEBUG nova.network.neutron [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Successfully updated port: 022a0379-8a0f-412f-a55a-f8fcaf1102f3 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1818.989572] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fdb15ee-f088-45e2-86c3-cfa3d971fd0d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.005271] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d94da2d3-98ec-4b93-a41f-57123d3185b8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.045574] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15d1c576-395d-4c32-b270-c95c18dbbe3d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.053879] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb6e53c7-1df7-4e3d-8863-c13a4d6ad781 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.059236] env[62510]: DEBUG oslo_concurrency.lockutils [None req-771b5f4f-aab0-4cfe-8afe-bc0da8057684 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "313f7916-0737-4e44-ae2f-58301934bf06" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.653s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1819.070903] env[62510]: DEBUG nova.compute.provider_tree [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1819.127018] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d67d8b0a-dffc-4f7e-b0e2-3f3d83bddcc7 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "bc474f8b-dd3b-4d7a-a8e0-fea5570b3091" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1819.127018] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d67d8b0a-dffc-4f7e-b0e2-3f3d83bddcc7 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "bc474f8b-dd3b-4d7a-a8e0-fea5570b3091" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1819.127018] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 
tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Creating Snapshot of the VM instance {{(pid=62510) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1819.127018] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-53f434a6-3f42-41f2-a10b-0cc699c20222 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.135918] env[62510]: DEBUG oslo_vmware.api [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 1819.135918] env[62510]: value = "task-1769294" [ 1819.135918] env[62510]: _type = "Task" [ 1819.135918] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1819.151498] env[62510]: DEBUG oslo_vmware.api [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769294, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.369808] env[62510]: DEBUG oslo_vmware.api [req-b3630f8f-e98d-4535-bf8f-455052c28bff req-aeeb4aee-82d2-456b-9312-0d1ef32b3895 service nova] Task: {'id': task-1769279, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.391697] env[62510]: DEBUG oslo_vmware.api [None req-fedbab3f-d0a3-4b71-a4c6-d31b6621912f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769293, 'name': Destroy_Task} progress is 33%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.411445] env[62510]: DEBUG oslo_concurrency.lockutils [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquiring lock "refresh_cache-cf4160a8-1160-45fc-b9e5-e9526b6c1506" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1819.411608] env[62510]: DEBUG oslo_concurrency.lockutils [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquired lock "refresh_cache-cf4160a8-1160-45fc-b9e5-e9526b6c1506" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1819.411787] env[62510]: DEBUG nova.network.neutron [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1819.574693] env[62510]: DEBUG nova.scheduler.client.report [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1819.630426] env[62510]: DEBUG nova.compute.utils [None req-d67d8b0a-dffc-4f7e-b0e2-3f3d83bddcc7 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1819.647346] env[62510]: DEBUG oslo_vmware.api [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769294, 'name': CreateSnapshot_Task, 'duration_secs': 0.461492} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1819.647724] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Created Snapshot of the VM instance {{(pid=62510) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1819.649309] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f79f6a6-91ba-44e2-921c-2aaf1e1d461a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.867542] env[62510]: DEBUG oslo_vmware.api [req-b3630f8f-e98d-4535-bf8f-455052c28bff req-aeeb4aee-82d2-456b-9312-0d1ef32b3895 service nova] Task: {'id': task-1769279, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.891966] env[62510]: DEBUG oslo_vmware.api [None req-fedbab3f-d0a3-4b71-a4c6-d31b6621912f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769293, 'name': Destroy_Task, 'duration_secs': 0.703555} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1819.892276] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-fedbab3f-d0a3-4b71-a4c6-d31b6621912f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Destroyed the VM [ 1819.892520] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-fedbab3f-d0a3-4b71-a4c6-d31b6621912f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Deleting Snapshot of the VM instance {{(pid=62510) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1819.892779] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-0428f5a6-bccd-4fcf-aadc-5fb79eb5c258 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.899932] env[62510]: DEBUG oslo_vmware.api [None req-fedbab3f-d0a3-4b71-a4c6-d31b6621912f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1819.899932] env[62510]: value = "task-1769295" [ 1819.899932] env[62510]: _type = "Task" [ 1819.899932] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1819.908650] env[62510]: DEBUG oslo_vmware.api [None req-fedbab3f-d0a3-4b71-a4c6-d31b6621912f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769295, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.956667] env[62510]: DEBUG nova.network.neutron [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Instance cache missing network info. 
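Note: taken together with the earlier CreateSnapshot_Task and CloneVM_Task entries, the ImagesTestJSON request has now run the full snapshot-to-image cycle: snapshot the VM, make a linked clone from that snapshot, upload the clone as image 643a286d-..., destroy the temporary clone, and remove the snapshot. A condensed outline of that sequence follows; `clone_from_snapshot` and `upload_image` are assumed placeholders for the spec-building and Glance-upload steps the log does not show.

```python
# Condensed outline (placeholder helpers, not nova.virt.vmwareapi.vmops) of
# the snapshot -> linked clone -> upload -> cleanup cycle in the log.
def capture_image(session, vm_ref, clone_from_snapshot, upload_image):
    # 1. CreateSnapshot_Task: snapshot the running VM; the task result is
    #    the new VirtualMachineSnapshot reference.
    snap_task = session.invoke_api(
        session.vim, 'CreateSnapshot_Task', vm_ref,
        name='nova-image-snapshot', description='temporary image snapshot',
        memory=False, quiesce=True)
    snapshot_ref = session.wait_for_task(snap_task).result

    # 2. CloneVM_Task ("Creating linked-clone VM from snapshot"); building
    #    the CloneSpec/RelocateSpec is delegated to the placeholder helper.
    clone_ref = clone_from_snapshot(session, vm_ref, snapshot_ref)

    # 3. Stream the clone's disk to Glance ("Uploading image ..." above);
    #    upload_image is a placeholder for that step.
    upload_image(session, clone_ref)

    # 4. Destroy_Task on the temporary clone, then RemoveSnapshot_Task on
    #    the source VM so it is left exactly as it was.
    session.wait_for_task(
        session.invoke_api(session.vim, 'Destroy_Task', clone_ref))
    session.wait_for_task(
        session.invoke_api(session.vim, 'RemoveSnapshot_Task',
                           snapshot_ref, removeChildren=False))
```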
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1820.080879] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.533s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1820.080879] env[62510]: DEBUG nova.compute.manager [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1820.083619] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bdf94947-4a89-4af4-a764-8935c0f78cca tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.286s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1820.083619] env[62510]: DEBUG nova.objects.instance [None req-bdf94947-4a89-4af4-a764-8935c0f78cca tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Lazy-loading 'resources' on Instance uuid f40078f0-af6b-480b-96e6-4117022c87e2 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1820.126723] env[62510]: DEBUG nova.network.neutron [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Updating instance_info_cache with network_info: [{"id": "022a0379-8a0f-412f-a55a-f8fcaf1102f3", "address": "fa:16:3e:fc:f0:87", "network": {"id": "3958d418-1b64-4598-975c-02b13c976ce5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1692593298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3109fa7889c64dfda2117d4cd58aa528", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ba56987-9dc3-4c76-a4e2-942b05355bdb", "external-id": "nsx-vlan-transportzone-698", "segmentation_id": 698, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap022a0379-8a", "ovs_interfaceid": "022a0379-8a0f-412f-a55a-f8fcaf1102f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1820.132960] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d67d8b0a-dffc-4f7e-b0e2-3f3d83bddcc7 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "bc474f8b-dd3b-4d7a-a8e0-fea5570b3091" "released" by 
"nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1820.174400] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Creating linked-clone VM from snapshot {{(pid=62510) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1820.174860] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-3bd1d209-d766-4d46-9351-f523ed758b6b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.186687] env[62510]: DEBUG oslo_vmware.api [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 1820.186687] env[62510]: value = "task-1769296" [ 1820.186687] env[62510]: _type = "Task" [ 1820.186687] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1820.203749] env[62510]: DEBUG oslo_vmware.api [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769296, 'name': CloneVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.271060] env[62510]: DEBUG oslo_concurrency.lockutils [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Acquiring lock "2e24b76d-a770-4f1e-a8f1-a54417f1be81" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1820.271691] env[62510]: DEBUG oslo_concurrency.lockutils [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Lock "2e24b76d-a770-4f1e-a8f1-a54417f1be81" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1820.369918] env[62510]: DEBUG oslo_vmware.api [req-b3630f8f-e98d-4535-bf8f-455052c28bff req-aeeb4aee-82d2-456b-9312-0d1ef32b3895 service nova] Task: {'id': task-1769279, 'name': ReconfigVM_Task, 'duration_secs': 5.909861} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1820.370270] env[62510]: DEBUG oslo_concurrency.lockutils [req-b3630f8f-e98d-4535-bf8f-455052c28bff req-aeeb4aee-82d2-456b-9312-0d1ef32b3895 service nova] Releasing lock "241d842d-3dd5-4ac2-a18a-12b9c9fbd340" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1820.370526] env[62510]: DEBUG nova.virt.vmwareapi.vmops [req-b3630f8f-e98d-4535-bf8f-455052c28bff req-aeeb4aee-82d2-456b-9312-0d1ef32b3895 service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Reconfigured VM to detach interface {{(pid=62510) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1820.371019] env[62510]: DEBUG oslo_concurrency.lockutils [None req-adebd63f-8aef-4dfe-a3d9-abd87a7f08d4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "241d842d-3dd5-4ac2-a18a-12b9c9fbd340" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 6.102s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1820.371244] env[62510]: DEBUG oslo_concurrency.lockutils [None req-adebd63f-8aef-4dfe-a3d9-abd87a7f08d4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "241d842d-3dd5-4ac2-a18a-12b9c9fbd340-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1820.371453] env[62510]: DEBUG oslo_concurrency.lockutils [None req-adebd63f-8aef-4dfe-a3d9-abd87a7f08d4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "241d842d-3dd5-4ac2-a18a-12b9c9fbd340-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1820.371620] env[62510]: DEBUG oslo_concurrency.lockutils [None req-adebd63f-8aef-4dfe-a3d9-abd87a7f08d4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "241d842d-3dd5-4ac2-a18a-12b9c9fbd340-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1820.373722] env[62510]: INFO nova.compute.manager [None req-adebd63f-8aef-4dfe-a3d9-abd87a7f08d4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Terminating instance [ 1820.412805] env[62510]: DEBUG oslo_vmware.api [None req-fedbab3f-d0a3-4b71-a4c6-d31b6621912f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769295, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.542352] env[62510]: DEBUG oslo_concurrency.lockutils [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "313f7916-0737-4e44-ae2f-58301934bf06" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1820.542643] env[62510]: DEBUG oslo_concurrency.lockutils [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "313f7916-0737-4e44-ae2f-58301934bf06" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1820.542832] env[62510]: INFO nova.compute.manager [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Shelving [ 1820.587290] env[62510]: DEBUG nova.compute.utils [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1820.593082] env[62510]: DEBUG nova.compute.manager [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1820.593082] env[62510]: DEBUG nova.network.neutron [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1820.629693] env[62510]: DEBUG oslo_concurrency.lockutils [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Releasing lock "refresh_cache-cf4160a8-1160-45fc-b9e5-e9526b6c1506" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1820.630094] env[62510]: DEBUG nova.compute.manager [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Instance network_info: |[{"id": "022a0379-8a0f-412f-a55a-f8fcaf1102f3", "address": "fa:16:3e:fc:f0:87", "network": {"id": "3958d418-1b64-4598-975c-02b13c976ce5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1692593298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3109fa7889c64dfda2117d4cd58aa528", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ba56987-9dc3-4c76-a4e2-942b05355bdb", "external-id": "nsx-vlan-transportzone-698", "segmentation_id": 698, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap022a0379-8a", "ovs_interfaceid": "022a0379-8a0f-412f-a55a-f8fcaf1102f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1820.631154] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fc:f0:87', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6ba56987-9dc3-4c76-a4e2-942b05355bdb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '022a0379-8a0f-412f-a55a-f8fcaf1102f3', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1820.638359] env[62510]: DEBUG oslo.service.loopingcall [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1820.639029] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1820.642375] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-afcecb5b-473a-4545-94c6-455e1be8a78a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.665104] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1820.665104] env[62510]: value = "task-1769297" [ 1820.665104] env[62510]: _type = "Task" [ 1820.665104] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1820.666934] env[62510]: DEBUG nova.policy [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a406a2bf0ccd4b99ba7dcb359a9b640e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e144c0bd2d124193a65ad53de8c43039', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1820.683213] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769297, 'name': CreateVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.699967] env[62510]: DEBUG oslo_vmware.api [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769296, 'name': CloneVM_Task} progress is 94%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.774992] env[62510]: DEBUG nova.compute.manager [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1820.878017] env[62510]: DEBUG nova.compute.manager [None req-adebd63f-8aef-4dfe-a3d9-abd87a7f08d4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1820.878371] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-adebd63f-8aef-4dfe-a3d9-abd87a7f08d4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1820.879264] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-302b4749-2644-4a62-9e7b-22cd9bf5d92b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.888009] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-adebd63f-8aef-4dfe-a3d9-abd87a7f08d4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1820.888428] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c33db17d-c71f-4f53-a99b-c38aaa47dbd2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.895579] env[62510]: DEBUG oslo_vmware.api [None req-adebd63f-8aef-4dfe-a3d9-abd87a7f08d4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1820.895579] env[62510]: value = "task-1769298" [ 1820.895579] env[62510]: _type = "Task" [ 1820.895579] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1820.912481] env[62510]: DEBUG oslo_vmware.api [None req-fedbab3f-d0a3-4b71-a4c6-d31b6621912f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769295, 'name': RemoveSnapshot_Task, 'duration_secs': 0.916839} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1820.912729] env[62510]: DEBUG oslo_vmware.api [None req-adebd63f-8aef-4dfe-a3d9-abd87a7f08d4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769298, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.915462] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-fedbab3f-d0a3-4b71-a4c6-d31b6621912f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Deleted Snapshot of the VM instance {{(pid=62510) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1820.962595] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-006c6f68-76c3-487d-a506-752418104454 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.973934] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4411bbf5-d736-4397-8bc5-e4736b4d458c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.016699] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7e291a0-44ed-4e40-b60c-df27e1ad1f82 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.025675] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a45abc52-72d8-4f41-a79c-17a33092435f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.041490] env[62510]: DEBUG nova.compute.provider_tree [None req-bdf94947-4a89-4af4-a764-8935c0f78cca tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1821.094558] env[62510]: DEBUG nova.compute.manager [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1821.180455] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769297, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.202284] env[62510]: DEBUG oslo_vmware.api [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769296, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.206140] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d67d8b0a-dffc-4f7e-b0e2-3f3d83bddcc7 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "bc474f8b-dd3b-4d7a-a8e0-fea5570b3091" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1821.206328] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d67d8b0a-dffc-4f7e-b0e2-3f3d83bddcc7 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "bc474f8b-dd3b-4d7a-a8e0-fea5570b3091" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1821.206533] env[62510]: INFO nova.compute.manager [None req-d67d8b0a-dffc-4f7e-b0e2-3f3d83bddcc7 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Attaching volume b844eb5c-7644-4bb5-900e-d0a16620fbe8 to /dev/sdb [ 1821.225941] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquiring lock "5f229f78-6c5d-4170-bdd4-c5522b137949" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1821.226108] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lock "5f229f78-6c5d-4170-bdd4-c5522b137949" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1821.233800] env[62510]: DEBUG nova.network.neutron [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Successfully created port: 01ac60bf-a53d-4e8f-a7ff-3329360878af {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1821.260702] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21d4a7c3-caf0-4b3c-ab1d-473c0b3e5b22 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.270190] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85893ad9-43d7-4d81-8199-2ba9adf31787 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.292951] env[62510]: DEBUG nova.virt.block_device [None req-d67d8b0a-dffc-4f7e-b0e2-3f3d83bddcc7 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Updating existing volume attachment record: 
e4c647dc-10c2-4101-a271-3525505d26ea {{(pid=62510) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1821.309199] env[62510]: DEBUG oslo_concurrency.lockutils [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1821.409974] env[62510]: DEBUG oslo_vmware.api [None req-adebd63f-8aef-4dfe-a3d9-abd87a7f08d4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769298, 'name': PowerOffVM_Task, 'duration_secs': 0.322778} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1821.410375] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-adebd63f-8aef-4dfe-a3d9-abd87a7f08d4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1821.410449] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-adebd63f-8aef-4dfe-a3d9-abd87a7f08d4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1821.410710] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-383457be-3f34-4c3c-b32d-9fc31daf8097 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.420057] env[62510]: WARNING nova.compute.manager [None req-fedbab3f-d0a3-4b71-a4c6-d31b6621912f tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Image not found during snapshot: nova.exception.ImageNotFound: Image 643a286d-0601-4c40-b1cb-8a3927f8e2fa could not be found. 
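The entries above and below repeatedly show the same oslo.vmware pattern: a vCenter task (CloneVM_Task, PowerOffVM_Task, CreateVM_Task, DeleteDatastoreFile_Task) is started, then wait_for_task polls it and logs "progress is N%" until it completes. The following is a minimal illustrative sketch of that pattern, not Nova's own code; the connection parameters and the way vm_ref is obtained are placeholders assumed for the example.

```python
# Sketch of the invoke-task-then-poll pattern visible in the surrounding log
# entries (wait_for_task / _poll_task in oslo_vmware/api.py). Assumptions:
# host/user/password are placeholders, and vm_ref is a VirtualMachine managed
# object reference obtained elsewhere (hypothetical in this sketch).
from oslo_vmware import api as vmware_api


def power_off_and_wait(host, user, password, vm_ref):
    # VMwareAPISession logs in via SessionManager.Login and keeps the session
    # alive, matching the "_create_session" activity earlier in this log.
    session = vmware_api.VMwareAPISession(
        host,      # vCenter hostname
        user,      # vCenter username
        password,  # vCenter password
        10,        # api_retry_count
        0.5)       # task_poll_interval, seconds between task polls
    try:
        # Start the task on the VM; returns a Task managed object reference,
        # analogous to the task-1769298 PowerOffVM_Task entry above.
        task_ref = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # Blocks while polling task.info (the "progress is N%" debug lines)
        # and raises an oslo.vmware exception if the task ends in error.
        return session.wait_for_task(task_ref)
    finally:
        session.logout()
```

The same wait loop backs the longer-running operations in this trace (the linked-clone CloneVM_Task and the datastore file deletion); only the invoked method and its arguments differ.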
[ 1821.535515] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-adebd63f-8aef-4dfe-a3d9-abd87a7f08d4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1821.535515] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-adebd63f-8aef-4dfe-a3d9-abd87a7f08d4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1821.535515] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-adebd63f-8aef-4dfe-a3d9-abd87a7f08d4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Deleting the datastore file [datastore1] 241d842d-3dd5-4ac2-a18a-12b9c9fbd340 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1821.535515] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-21f4207e-689d-48a3-9f7c-68df54e14862 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.547511] env[62510]: DEBUG nova.scheduler.client.report [None req-bdf94947-4a89-4af4-a764-8935c0f78cca tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1821.553274] env[62510]: DEBUG oslo_vmware.api [None req-adebd63f-8aef-4dfe-a3d9-abd87a7f08d4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1821.553274] env[62510]: value = "task-1769301" [ 1821.553274] env[62510]: _type = "Task" [ 1821.553274] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1821.566154] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1821.571239] env[62510]: DEBUG oslo_vmware.api [None req-adebd63f-8aef-4dfe-a3d9-abd87a7f08d4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769301, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.571239] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-890f9654-ad20-457b-9432-6cdbca0aca24 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.576651] env[62510]: DEBUG oslo_vmware.api [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1821.576651] env[62510]: value = "task-1769302" [ 1821.576651] env[62510]: _type = "Task" [ 1821.576651] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1821.591723] env[62510]: DEBUG oslo_vmware.api [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769302, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.683342] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769297, 'name': CreateVM_Task, 'duration_secs': 0.690852} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1821.683342] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1821.683981] env[62510]: DEBUG oslo_concurrency.lockutils [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1821.684194] env[62510]: DEBUG oslo_concurrency.lockutils [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1821.684655] env[62510]: DEBUG oslo_concurrency.lockutils [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1821.684798] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c236049-bce4-4e35-93a7-602241eac158 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.690863] env[62510]: DEBUG oslo_vmware.api [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1821.690863] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]528e1d12-c2ca-61a0-ffb3-17dcfccb52e3" [ 
1821.690863] env[62510]: _type = "Task" [ 1821.690863] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1821.703894] env[62510]: DEBUG nova.compute.manager [req-790938c0-9b0c-4e49-843e-bbd21405893d req-3a620870-894e-478f-b762-56313a2e271e service nova] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Received event network-changed-022a0379-8a0f-412f-a55a-f8fcaf1102f3 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1821.703931] env[62510]: DEBUG nova.compute.manager [req-790938c0-9b0c-4e49-843e-bbd21405893d req-3a620870-894e-478f-b762-56313a2e271e service nova] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Refreshing instance network info cache due to event network-changed-022a0379-8a0f-412f-a55a-f8fcaf1102f3. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1821.707021] env[62510]: DEBUG oslo_concurrency.lockutils [req-790938c0-9b0c-4e49-843e-bbd21405893d req-3a620870-894e-478f-b762-56313a2e271e service nova] Acquiring lock "refresh_cache-cf4160a8-1160-45fc-b9e5-e9526b6c1506" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1821.707021] env[62510]: DEBUG oslo_concurrency.lockutils [req-790938c0-9b0c-4e49-843e-bbd21405893d req-3a620870-894e-478f-b762-56313a2e271e service nova] Acquired lock "refresh_cache-cf4160a8-1160-45fc-b9e5-e9526b6c1506" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1821.707021] env[62510]: DEBUG nova.network.neutron [req-790938c0-9b0c-4e49-843e-bbd21405893d req-3a620870-894e-478f-b762-56313a2e271e service nova] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Refreshing network info cache for port 022a0379-8a0f-412f-a55a-f8fcaf1102f3 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1821.720029] env[62510]: DEBUG oslo_vmware.api [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769296, 'name': CloneVM_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.720754] env[62510]: DEBUG oslo_vmware.api [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]528e1d12-c2ca-61a0-ffb3-17dcfccb52e3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.728651] env[62510]: DEBUG nova.compute.manager [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1821.860302] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquiring lock "6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1821.861273] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1822.055150] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bdf94947-4a89-4af4-a764-8935c0f78cca tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.972s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1822.058958] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d012cbfe-4aeb-4b3a-9ae0-e5c80ec2d32c tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.816s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1822.059238] env[62510]: DEBUG nova.objects.instance [None req-d012cbfe-4aeb-4b3a-9ae0-e5c80ec2d32c tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Lazy-loading 'resources' on Instance uuid 1dc9e3b6-5e75-49b4-aef0-01200fb9be47 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1822.069852] env[62510]: DEBUG oslo_vmware.api [None req-adebd63f-8aef-4dfe-a3d9-abd87a7f08d4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769301, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.30985} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1822.070685] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-adebd63f-8aef-4dfe-a3d9-abd87a7f08d4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1822.070790] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-adebd63f-8aef-4dfe-a3d9-abd87a7f08d4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1822.071283] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-adebd63f-8aef-4dfe-a3d9-abd87a7f08d4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1822.071283] env[62510]: INFO nova.compute.manager [None req-adebd63f-8aef-4dfe-a3d9-abd87a7f08d4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1822.071371] env[62510]: DEBUG oslo.service.loopingcall [None req-adebd63f-8aef-4dfe-a3d9-abd87a7f08d4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1822.071516] env[62510]: DEBUG nova.compute.manager [-] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1822.071619] env[62510]: DEBUG nova.network.neutron [-] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1822.089064] env[62510]: DEBUG oslo_vmware.api [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769302, 'name': PowerOffVM_Task, 'duration_secs': 0.241184} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1822.089979] env[62510]: INFO nova.scheduler.client.report [None req-bdf94947-4a89-4af4-a764-8935c0f78cca tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Deleted allocations for instance f40078f0-af6b-480b-96e6-4117022c87e2 [ 1822.090947] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1822.096918] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c1443af-204c-4d03-83ef-ba73ab294422 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.103995] env[62510]: DEBUG nova.compute.manager [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1822.121055] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14aff686-5483-4442-87bb-bfe6775367cc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.151209] env[62510]: DEBUG nova.virt.hardware [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1822.151546] env[62510]: DEBUG nova.virt.hardware [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1822.151632] env[62510]: DEBUG nova.virt.hardware [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1822.151812] env[62510]: DEBUG nova.virt.hardware [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 
tempest-ServerDiskConfigTestJSON-921990528-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1822.152587] env[62510]: DEBUG nova.virt.hardware [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1822.152587] env[62510]: DEBUG nova.virt.hardware [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1822.152761] env[62510]: DEBUG nova.virt.hardware [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1822.152910] env[62510]: DEBUG nova.virt.hardware [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1822.153208] env[62510]: DEBUG nova.virt.hardware [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1822.153451] env[62510]: DEBUG nova.virt.hardware [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1822.153641] env[62510]: DEBUG nova.virt.hardware [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1822.158357] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc2d8ca1-97c6-46ca-8c67-677e456f61f4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.168226] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7011da9-6940-4cec-8f3d-936324cdef4a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.186763] env[62510]: DEBUG neutronclient.v2_0.client [-] Error message: {"NeutronError": {"type": "PortNotFound", "message": "Port 1bc2d7ec-858c-45a9-8966-8c35ee7ef110 could not be found.", "detail": ""}} {{(pid=62510) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1822.187231] env[62510]: DEBUG 
nova.network.neutron [-] Unable to show port 1bc2d7ec-858c-45a9-8966-8c35ee7ef110 as it no longer exists. {{(pid=62510) _unbind_ports /opt/stack/nova/nova/network/neutron.py:666}} [ 1822.203853] env[62510]: DEBUG oslo_vmware.api [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769296, 'name': CloneVM_Task, 'duration_secs': 1.562649} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1822.204563] env[62510]: INFO nova.virt.vmwareapi.vmops [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Created linked-clone VM from snapshot [ 1822.205425] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38cb9e9f-f9ef-4b61-bbfc-6ca120518391 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.211641] env[62510]: DEBUG oslo_vmware.api [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]528e1d12-c2ca-61a0-ffb3-17dcfccb52e3, 'name': SearchDatastore_Task, 'duration_secs': 0.037963} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1822.212663] env[62510]: DEBUG oslo_concurrency.lockutils [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1822.212663] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1822.212911] env[62510]: DEBUG oslo_concurrency.lockutils [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1822.213108] env[62510]: DEBUG oslo_concurrency.lockutils [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1822.213333] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 
tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1822.213628] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f860c1d7-3cbe-41bd-bbb7-b623589de8f6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.218730] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Uploading image f76254e3-6418-4890-b818-183d24e89b62 {{(pid=62510) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1822.234262] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1822.234262] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1822.234262] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b731c5b8-5921-46e9-acfe-210fe72e46e1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.243964] env[62510]: DEBUG oslo_vmware.api [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1822.243964] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52e1bf42-0ab8-15d6-efa8-fc556271c8fc" [ 1822.243964] env[62510]: _type = "Task" [ 1822.243964] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.250939] env[62510]: DEBUG oslo_vmware.rw_handles [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1822.250939] env[62510]: value = "vm-367440" [ 1822.250939] env[62510]: _type = "VirtualMachine" [ 1822.250939] env[62510]: }. 
{{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1822.251728] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-41cf3255-d03f-446e-87d0-a39cabc95a04 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.257208] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1822.257813] env[62510]: DEBUG oslo_vmware.api [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52e1bf42-0ab8-15d6-efa8-fc556271c8fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.265434] env[62510]: DEBUG oslo_vmware.rw_handles [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lease: (returnval){ [ 1822.265434] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52197b71-02af-3bf7-0729-4fbc9f69e0be" [ 1822.265434] env[62510]: _type = "HttpNfcLease" [ 1822.265434] env[62510]: } obtained for exporting VM: (result){ [ 1822.265434] env[62510]: value = "vm-367440" [ 1822.265434] env[62510]: _type = "VirtualMachine" [ 1822.265434] env[62510]: }. {{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1822.265706] env[62510]: DEBUG oslo_vmware.api [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the lease: (returnval){ [ 1822.265706] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52197b71-02af-3bf7-0729-4fbc9f69e0be" [ 1822.265706] env[62510]: _type = "HttpNfcLease" [ 1822.265706] env[62510]: } to be ready. {{(pid=62510) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1822.275611] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1822.275611] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52197b71-02af-3bf7-0729-4fbc9f69e0be" [ 1822.275611] env[62510]: _type = "HttpNfcLease" [ 1822.275611] env[62510]: } is initializing. {{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1822.362814] env[62510]: DEBUG nova.compute.manager [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1822.568282] env[62510]: DEBUG nova.network.neutron [req-790938c0-9b0c-4e49-843e-bbd21405893d req-3a620870-894e-478f-b762-56313a2e271e service nova] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Updated VIF entry in instance network info cache for port 022a0379-8a0f-412f-a55a-f8fcaf1102f3. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1822.568762] env[62510]: DEBUG nova.network.neutron [req-790938c0-9b0c-4e49-843e-bbd21405893d req-3a620870-894e-478f-b762-56313a2e271e service nova] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Updating instance_info_cache with network_info: [{"id": "022a0379-8a0f-412f-a55a-f8fcaf1102f3", "address": "fa:16:3e:fc:f0:87", "network": {"id": "3958d418-1b64-4598-975c-02b13c976ce5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1692593298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3109fa7889c64dfda2117d4cd58aa528", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ba56987-9dc3-4c76-a4e2-942b05355bdb", "external-id": "nsx-vlan-transportzone-698", "segmentation_id": 698, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap022a0379-8a", "ovs_interfaceid": "022a0379-8a0f-412f-a55a-f8fcaf1102f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1822.603744] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bdf94947-4a89-4af4-a764-8935c0f78cca tempest-VolumesAdminNegativeTest-1464576921 tempest-VolumesAdminNegativeTest-1464576921-project-member] Lock "f40078f0-af6b-480b-96e6-4117022c87e2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.386s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1822.634887] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Creating Snapshot of the VM instance {{(pid=62510) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1822.634887] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-90a27478-2a8e-4ce1-a554-cf00525b1461 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.645344] env[62510]: DEBUG oslo_vmware.api [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1822.645344] env[62510]: value = "task-1769306" [ 1822.645344] env[62510]: _type = "Task" [ 1822.645344] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.661505] env[62510]: DEBUG oslo_vmware.api [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769306, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.728856] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e100115f-e0f5-4a23-adf6-107caff81f46 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquiring lock "9fe592c1-e23a-46d5-8952-c181709d93e7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1822.729172] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e100115f-e0f5-4a23-adf6-107caff81f46 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "9fe592c1-e23a-46d5-8952-c181709d93e7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1822.729431] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e100115f-e0f5-4a23-adf6-107caff81f46 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquiring lock "9fe592c1-e23a-46d5-8952-c181709d93e7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1822.729664] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e100115f-e0f5-4a23-adf6-107caff81f46 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "9fe592c1-e23a-46d5-8952-c181709d93e7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1822.729853] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e100115f-e0f5-4a23-adf6-107caff81f46 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "9fe592c1-e23a-46d5-8952-c181709d93e7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1822.734750] env[62510]: INFO nova.compute.manager [None req-e100115f-e0f5-4a23-adf6-107caff81f46 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Terminating instance [ 1822.758255] env[62510]: DEBUG oslo_vmware.api [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52e1bf42-0ab8-15d6-efa8-fc556271c8fc, 'name': SearchDatastore_Task, 'duration_secs': 0.032499} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1822.759886] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd274b6a-5f33-4b36-812f-da75a3cf23b6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.771092] env[62510]: DEBUG oslo_vmware.api [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1822.771092] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52753c49-b238-aca8-6c71-401d06517fb6" [ 1822.771092] env[62510]: _type = "Task" [ 1822.771092] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.780175] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1822.780175] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52197b71-02af-3bf7-0729-4fbc9f69e0be" [ 1822.780175] env[62510]: _type = "HttpNfcLease" [ 1822.780175] env[62510]: } is ready. {{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1822.780175] env[62510]: DEBUG oslo_vmware.rw_handles [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1822.780175] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52197b71-02af-3bf7-0729-4fbc9f69e0be" [ 1822.780175] env[62510]: _type = "HttpNfcLease" [ 1822.780175] env[62510]: }. {{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1822.780175] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ec0aec5-6e5d-4d72-8981-b5e56bba5550 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.785545] env[62510]: DEBUG oslo_vmware.api [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52753c49-b238-aca8-6c71-401d06517fb6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.794526] env[62510]: DEBUG oslo_vmware.rw_handles [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523ab937-0c52-0e93-4c57-65560e4d1981/disk-0.vmdk from lease info. {{(pid=62510) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1822.794720] env[62510]: DEBUG oslo_vmware.rw_handles [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523ab937-0c52-0e93-4c57-65560e4d1981/disk-0.vmdk for reading. 
{{(pid=62510) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1822.865122] env[62510]: DEBUG nova.compute.manager [req-278ff306-a303-4881-82dd-fb49ce6651de req-36ae6a03-0f48-4c71-863f-d7a8578f1602 service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Received event network-vif-deleted-e0d0d69b-8e64-4722-b7d5-837e5c7482bc {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1822.865122] env[62510]: INFO nova.compute.manager [req-278ff306-a303-4881-82dd-fb49ce6651de req-36ae6a03-0f48-4c71-863f-d7a8578f1602 service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Neutron deleted interface e0d0d69b-8e64-4722-b7d5-837e5c7482bc; detaching it from the instance and deleting it from the info cache [ 1822.865122] env[62510]: DEBUG nova.network.neutron [req-278ff306-a303-4881-82dd-fb49ce6651de req-36ae6a03-0f48-4c71-863f-d7a8578f1602 service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1822.894611] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1822.903022] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-c952ceb0-d9d8-4019-8106-4833786f3b4a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.018873] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8782b05a-0a7b-4d8f-9a14-d53cb0dd2e5f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.027037] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b67fb09-b89a-4391-8bb6-9c8c71e9670b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.060793] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91967a6c-e900-4ec6-b878-1293ba911737 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.068327] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4011a8a6-8ef0-45e5-9306-331e117439e0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.072655] env[62510]: DEBUG oslo_concurrency.lockutils [req-790938c0-9b0c-4e49-843e-bbd21405893d req-3a620870-894e-478f-b762-56313a2e271e service nova] Releasing lock "refresh_cache-cf4160a8-1160-45fc-b9e5-e9526b6c1506" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1823.083631] env[62510]: DEBUG nova.compute.provider_tree [None req-d012cbfe-4aeb-4b3a-9ae0-e5c80ec2d32c tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Inventory has not changed in ProviderTree for provider: 
c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1823.156590] env[62510]: DEBUG oslo_vmware.api [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769306, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.185977] env[62510]: DEBUG nova.network.neutron [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Successfully updated port: 01ac60bf-a53d-4e8f-a7ff-3329360878af {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1823.239887] env[62510]: DEBUG nova.compute.manager [None req-e100115f-e0f5-4a23-adf6-107caff81f46 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1823.239887] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e100115f-e0f5-4a23-adf6-107caff81f46 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1823.240088] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d39c98e-ade5-47e9-b1f6-a59e5bdca1c4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.249400] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e100115f-e0f5-4a23-adf6-107caff81f46 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1823.249782] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3782c7ce-cf4d-41be-b980-731b67317621 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.257520] env[62510]: DEBUG oslo_vmware.api [None req-e100115f-e0f5-4a23-adf6-107caff81f46 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1823.257520] env[62510]: value = "task-1769307" [ 1823.257520] env[62510]: _type = "Task" [ 1823.257520] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.266660] env[62510]: DEBUG oslo_vmware.api [None req-e100115f-e0f5-4a23-adf6-107caff81f46 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769307, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.285041] env[62510]: DEBUG oslo_vmware.api [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52753c49-b238-aca8-6c71-401d06517fb6, 'name': SearchDatastore_Task, 'duration_secs': 0.013088} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.285041] env[62510]: DEBUG oslo_concurrency.lockutils [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1823.285041] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] cf4160a8-1160-45fc-b9e5-e9526b6c1506/cf4160a8-1160-45fc-b9e5-e9526b6c1506.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1823.285041] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7ee9ea67-36ac-46cd-9363-e64ca2ab5a96 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.286218] env[62510]: DEBUG nova.network.neutron [-] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1823.298116] env[62510]: DEBUG oslo_vmware.api [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1823.298116] env[62510]: value = "task-1769308" [ 1823.298116] env[62510]: _type = "Task" [ 1823.298116] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.310849] env[62510]: DEBUG oslo_vmware.api [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769308, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.368180] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-79e647af-7d4a-4bbd-b8e5-873b403b3db2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.379623] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92d0c06b-8fd2-4e2c-b244-ed2b46073eb5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.425945] env[62510]: DEBUG nova.compute.manager [req-278ff306-a303-4881-82dd-fb49ce6651de req-36ae6a03-0f48-4c71-863f-d7a8578f1602 service nova] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Detach interface failed, port_id=e0d0d69b-8e64-4722-b7d5-837e5c7482bc, reason: Instance 241d842d-3dd5-4ac2-a18a-12b9c9fbd340 could not be found. {{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1823.587926] env[62510]: DEBUG nova.scheduler.client.report [None req-d012cbfe-4aeb-4b3a-9ae0-e5c80ec2d32c tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1823.660322] env[62510]: DEBUG oslo_vmware.api [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769306, 'name': CreateSnapshot_Task, 'duration_secs': 0.862638} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.660322] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Created Snapshot of the VM instance {{(pid=62510) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1823.660322] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c176b6dd-c12d-4c11-97af-8a5966e2ed1a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.689235] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "refresh_cache-ebd2dc4b-8d74-47db-861e-870d41a4150b" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1823.689499] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquired lock "refresh_cache-ebd2dc4b-8d74-47db-861e-870d41a4150b" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1823.689660] env[62510]: DEBUG nova.network.neutron [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1823.745217] env[62510]: DEBUG nova.compute.manager [req-c8adbd0a-43a1-44ce-a04b-094c712bfa42 req-54cb7435-f821-49c3-b731-34462d7db5ea service nova] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Received event network-vif-plugged-01ac60bf-a53d-4e8f-a7ff-3329360878af {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1823.745303] env[62510]: DEBUG oslo_concurrency.lockutils [req-c8adbd0a-43a1-44ce-a04b-094c712bfa42 req-54cb7435-f821-49c3-b731-34462d7db5ea service nova] Acquiring lock "ebd2dc4b-8d74-47db-861e-870d41a4150b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1823.747363] env[62510]: DEBUG oslo_concurrency.lockutils [req-c8adbd0a-43a1-44ce-a04b-094c712bfa42 req-54cb7435-f821-49c3-b731-34462d7db5ea service nova] Lock "ebd2dc4b-8d74-47db-861e-870d41a4150b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1823.747363] env[62510]: DEBUG oslo_concurrency.lockutils [req-c8adbd0a-43a1-44ce-a04b-094c712bfa42 req-54cb7435-f821-49c3-b731-34462d7db5ea service nova] Lock "ebd2dc4b-8d74-47db-861e-870d41a4150b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1823.747363] env[62510]: DEBUG nova.compute.manager 
[req-c8adbd0a-43a1-44ce-a04b-094c712bfa42 req-54cb7435-f821-49c3-b731-34462d7db5ea service nova] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] No waiting events found dispatching network-vif-plugged-01ac60bf-a53d-4e8f-a7ff-3329360878af {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1823.747363] env[62510]: WARNING nova.compute.manager [req-c8adbd0a-43a1-44ce-a04b-094c712bfa42 req-54cb7435-f821-49c3-b731-34462d7db5ea service nova] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Received unexpected event network-vif-plugged-01ac60bf-a53d-4e8f-a7ff-3329360878af for instance with vm_state building and task_state spawning. [ 1823.747363] env[62510]: DEBUG nova.compute.manager [req-c8adbd0a-43a1-44ce-a04b-094c712bfa42 req-54cb7435-f821-49c3-b731-34462d7db5ea service nova] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Received event network-changed-01ac60bf-a53d-4e8f-a7ff-3329360878af {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1823.747363] env[62510]: DEBUG nova.compute.manager [req-c8adbd0a-43a1-44ce-a04b-094c712bfa42 req-54cb7435-f821-49c3-b731-34462d7db5ea service nova] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Refreshing instance network info cache due to event network-changed-01ac60bf-a53d-4e8f-a7ff-3329360878af. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1823.747752] env[62510]: DEBUG oslo_concurrency.lockutils [req-c8adbd0a-43a1-44ce-a04b-094c712bfa42 req-54cb7435-f821-49c3-b731-34462d7db5ea service nova] Acquiring lock "refresh_cache-ebd2dc4b-8d74-47db-861e-870d41a4150b" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1823.770915] env[62510]: DEBUG oslo_vmware.api [None req-e100115f-e0f5-4a23-adf6-107caff81f46 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769307, 'name': PowerOffVM_Task, 'duration_secs': 0.246355} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.770915] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e100115f-e0f5-4a23-adf6-107caff81f46 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1823.770915] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e100115f-e0f5-4a23-adf6-107caff81f46 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1823.770915] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5a4e860c-2475-49d0-8590-05892db9c735 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.795070] env[62510]: INFO nova.compute.manager [-] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Took 1.72 seconds to deallocate network for instance. [ 1823.812524] env[62510]: DEBUG oslo_vmware.api [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769308, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.015902] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e100115f-e0f5-4a23-adf6-107caff81f46 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1824.016278] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e100115f-e0f5-4a23-adf6-107caff81f46 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1824.016432] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-e100115f-e0f5-4a23-adf6-107caff81f46 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Deleting the datastore file [datastore1] 9fe592c1-e23a-46d5-8952-c181709d93e7 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1824.017626] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c5106acf-87eb-42c5-ba25-18b22673abf0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.025388] env[62510]: DEBUG oslo_vmware.api [None req-e100115f-e0f5-4a23-adf6-107caff81f46 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for the task: (returnval){ [ 1824.025388] env[62510]: value = "task-1769310" [ 1824.025388] env[62510]: _type = "Task" [ 1824.025388] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.037849] env[62510]: DEBUG oslo_vmware.api [None req-e100115f-e0f5-4a23-adf6-107caff81f46 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769310, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.096025] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d012cbfe-4aeb-4b3a-9ae0-e5c80ec2d32c tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.036s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1824.100697] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8189f79b-a4ba-46f9-9194-d393d9d2c4e3 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.035s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1824.100937] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8189f79b-a4ba-46f9-9194-d393d9d2c4e3 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1824.102826] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dc9b7a14-8df2-4991-8107-0a2dd2111e82 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.371s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1824.103068] env[62510]: DEBUG nova.objects.instance [None req-dc9b7a14-8df2-4991-8107-0a2dd2111e82 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Lazy-loading 'resources' on Instance uuid e7daad63-c802-4a86-bead-7e849064ed61 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1824.126074] env[62510]: INFO nova.scheduler.client.report [None req-8189f79b-a4ba-46f9-9194-d393d9d2c4e3 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Deleted allocations for instance 77f485ae-9c4c-424e-8bac-6d023e428767 [ 1824.126074] env[62510]: INFO nova.scheduler.client.report [None req-d012cbfe-4aeb-4b3a-9ae0-e5c80ec2d32c tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Deleted allocations for instance 1dc9e3b6-5e75-49b4-aef0-01200fb9be47 [ 1824.183128] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Creating linked-clone VM from snapshot {{(pid=62510) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1824.185057] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-2e12f945-5b67-4ce7-9fd4-23ef21eda2cb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.196537] env[62510]: DEBUG oslo_vmware.api [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 
tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1824.196537] env[62510]: value = "task-1769311" [ 1824.196537] env[62510]: _type = "Task" [ 1824.196537] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.210147] env[62510]: DEBUG oslo_vmware.api [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769311, 'name': CloneVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.263920] env[62510]: DEBUG nova.network.neutron [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1824.307867] env[62510]: DEBUG oslo_concurrency.lockutils [None req-adebd63f-8aef-4dfe-a3d9-abd87a7f08d4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1824.315477] env[62510]: DEBUG oslo_vmware.api [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769308, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.596412} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.315866] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] cf4160a8-1160-45fc-b9e5-e9526b6c1506/cf4160a8-1160-45fc-b9e5-e9526b6c1506.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1824.316131] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1824.316788] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-56535b91-95f1-4f23-a22b-1c564ad5266b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.325027] env[62510]: DEBUG oslo_vmware.api [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1824.325027] env[62510]: value = "task-1769312" [ 1824.325027] env[62510]: _type = "Task" [ 1824.325027] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.337771] env[62510]: DEBUG oslo_vmware.api [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769312, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.522702] env[62510]: DEBUG nova.network.neutron [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Updating instance_info_cache with network_info: [{"id": "01ac60bf-a53d-4e8f-a7ff-3329360878af", "address": "fa:16:3e:8b:d7:64", "network": {"id": "bf59f5d9-5154-4120-9edd-03529b552382", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2003015829-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e144c0bd2d124193a65ad53de8c43039", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01ac60bf-a5", "ovs_interfaceid": "01ac60bf-a53d-4e8f-a7ff-3329360878af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1824.537620] env[62510]: DEBUG oslo_vmware.api [None req-e100115f-e0f5-4a23-adf6-107caff81f46 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Task: {'id': task-1769310, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.189627} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.537968] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-e100115f-e0f5-4a23-adf6-107caff81f46 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1824.538283] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e100115f-e0f5-4a23-adf6-107caff81f46 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1824.538462] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e100115f-e0f5-4a23-adf6-107caff81f46 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1824.538668] env[62510]: INFO nova.compute.manager [None req-e100115f-e0f5-4a23-adf6-107caff81f46 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Took 1.30 seconds to destroy the instance on the hypervisor. [ 1824.538949] env[62510]: DEBUG oslo.service.loopingcall [None req-e100115f-e0f5-4a23-adf6-107caff81f46 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1824.539981] env[62510]: DEBUG nova.compute.manager [-] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1824.540134] env[62510]: DEBUG nova.network.neutron [-] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1824.637473] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8189f79b-a4ba-46f9-9194-d393d9d2c4e3 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "77f485ae-9c4c-424e-8bac-6d023e428767" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.267s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1824.647610] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d012cbfe-4aeb-4b3a-9ae0-e5c80ec2d32c tempest-ServerAddressesNegativeTestJSON-163867214 tempest-ServerAddressesNegativeTestJSON-163867214-project-member] Lock "1dc9e3b6-5e75-49b4-aef0-01200fb9be47" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.163s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1824.715747] env[62510]: DEBUG oslo_vmware.api [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769311, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.839310] env[62510]: DEBUG oslo_vmware.api [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769312, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.10984} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.840071] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1824.840550] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1552a2ee-2289-4aaf-bb5a-5c2790b60e84 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.866142] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] cf4160a8-1160-45fc-b9e5-e9526b6c1506/cf4160a8-1160-45fc-b9e5-e9526b6c1506.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1824.869354] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0897ead3-142b-4c67-8900-b7186bbc463a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.892899] env[62510]: DEBUG oslo_vmware.api [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1824.892899] env[62510]: value = "task-1769313" [ 1824.892899] env[62510]: _type = "Task" [ 1824.892899] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.906405] env[62510]: DEBUG oslo_vmware.api [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769313, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.025860] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Releasing lock "refresh_cache-ebd2dc4b-8d74-47db-861e-870d41a4150b" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1825.026495] env[62510]: DEBUG nova.compute.manager [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Instance network_info: |[{"id": "01ac60bf-a53d-4e8f-a7ff-3329360878af", "address": "fa:16:3e:8b:d7:64", "network": {"id": "bf59f5d9-5154-4120-9edd-03529b552382", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2003015829-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e144c0bd2d124193a65ad53de8c43039", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01ac60bf-a5", "ovs_interfaceid": "01ac60bf-a53d-4e8f-a7ff-3329360878af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1825.028087] env[62510]: DEBUG oslo_concurrency.lockutils [req-c8adbd0a-43a1-44ce-a04b-094c712bfa42 req-54cb7435-f821-49c3-b731-34462d7db5ea service nova] Acquired lock "refresh_cache-ebd2dc4b-8d74-47db-861e-870d41a4150b" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1825.028087] env[62510]: DEBUG nova.network.neutron [req-c8adbd0a-43a1-44ce-a04b-094c712bfa42 req-54cb7435-f821-49c3-b731-34462d7db5ea service nova] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Refreshing network info cache for port 01ac60bf-a53d-4e8f-a7ff-3329360878af {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1825.028651] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8b:d7:64', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '87bbf4e0-9064-4516-b7e7-44973f817205', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '01ac60bf-a53d-4e8f-a7ff-3329360878af', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1825.037774] env[62510]: DEBUG oslo.service.loopingcall [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 
tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1825.042917] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1825.044452] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d094cdd5-a102-47b9-9017-3a6080f281a4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.048419] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ac8a2035-099b-4dec-b5bb-f848da13db30 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.078687] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f801da4-b734-4251-b84e-cc6cc390d394 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.082814] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1825.082814] env[62510]: value = "task-1769314" [ 1825.082814] env[62510]: _type = "Task" [ 1825.082814] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.122014] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4721acb8-4828-4a20-896e-057473a00e2f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.130319] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769314, 'name': CreateVM_Task} progress is 15%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.138229] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b74f0f23-4ec8-4a69-9173-562a447059ca {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.154923] env[62510]: DEBUG nova.compute.provider_tree [None req-dc9b7a14-8df2-4991-8107-0a2dd2111e82 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1825.209870] env[62510]: DEBUG oslo_vmware.api [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769311, 'name': CloneVM_Task} progress is 94%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.408741] env[62510]: DEBUG oslo_vmware.api [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769313, 'name': ReconfigVM_Task, 'duration_secs': 0.474981} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.410597] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Reconfigured VM instance instance-0000005c to attach disk [datastore1] cf4160a8-1160-45fc-b9e5-e9526b6c1506/cf4160a8-1160-45fc-b9e5-e9526b6c1506.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1825.411525] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-01421c90-8660-475e-a77c-df4ce6d87e17 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.421021] env[62510]: DEBUG oslo_vmware.api [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1825.421021] env[62510]: value = "task-1769315" [ 1825.421021] env[62510]: _type = "Task" [ 1825.421021] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.434286] env[62510]: DEBUG oslo_vmware.api [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769315, 'name': Rename_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.477153] env[62510]: DEBUG nova.network.neutron [req-c8adbd0a-43a1-44ce-a04b-094c712bfa42 req-54cb7435-f821-49c3-b731-34462d7db5ea service nova] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Updated VIF entry in instance network info cache for port 01ac60bf-a53d-4e8f-a7ff-3329360878af. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1825.477771] env[62510]: DEBUG nova.network.neutron [req-c8adbd0a-43a1-44ce-a04b-094c712bfa42 req-54cb7435-f821-49c3-b731-34462d7db5ea service nova] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Updating instance_info_cache with network_info: [{"id": "01ac60bf-a53d-4e8f-a7ff-3329360878af", "address": "fa:16:3e:8b:d7:64", "network": {"id": "bf59f5d9-5154-4120-9edd-03529b552382", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2003015829-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e144c0bd2d124193a65ad53de8c43039", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "87bbf4e0-9064-4516-b7e7-44973f817205", "external-id": "nsx-vlan-transportzone-507", "segmentation_id": 507, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01ac60bf-a5", "ovs_interfaceid": "01ac60bf-a53d-4e8f-a7ff-3329360878af", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1825.600057] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769314, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.658624] env[62510]: DEBUG nova.scheduler.client.report [None req-dc9b7a14-8df2-4991-8107-0a2dd2111e82 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1825.710238] env[62510]: DEBUG oslo_vmware.api [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769311, 'name': CloneVM_Task, 'duration_secs': 1.512586} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.710562] env[62510]: INFO nova.virt.vmwareapi.vmops [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Created linked-clone VM from snapshot [ 1825.711345] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ab380f1-3b78-4248-96f3-66fcd6ea264f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.726114] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Uploading image 3fe87b83-b1ae-4d1f-8939-42b9e3d5d598 {{(pid=62510) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1825.768962] env[62510]: DEBUG oslo_vmware.rw_handles [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1825.768962] env[62510]: value = "vm-367445" [ 1825.768962] env[62510]: _type = "VirtualMachine" [ 1825.768962] env[62510]: }. {{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1825.769547] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-995603fd-22cb-446b-ba2c-f3fd55da1f7a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.779851] env[62510]: DEBUG oslo_vmware.rw_handles [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lease: (returnval){ [ 1825.779851] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52cf2d60-0865-0963-54ba-ac3657577786" [ 1825.779851] env[62510]: _type = "HttpNfcLease" [ 1825.779851] env[62510]: } obtained for exporting VM: (result){ [ 1825.779851] env[62510]: value = "vm-367445" [ 1825.779851] env[62510]: _type = "VirtualMachine" [ 1825.779851] env[62510]: }. {{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1825.783405] env[62510]: DEBUG oslo_vmware.api [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the lease: (returnval){ [ 1825.783405] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52cf2d60-0865-0963-54ba-ac3657577786" [ 1825.783405] env[62510]: _type = "HttpNfcLease" [ 1825.783405] env[62510]: } to be ready. 
{{(pid=62510) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1825.789556] env[62510]: DEBUG nova.compute.manager [req-161dd70d-fa7f-43ee-a6da-27a5a5fbf46e req-0b40e5ae-8dd8-4aa3-ae93-b79d524403ad service nova] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Received event network-vif-deleted-ca1200b2-6f64-4952-a587-f2fdb0fc14d1 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1825.789725] env[62510]: INFO nova.compute.manager [req-161dd70d-fa7f-43ee-a6da-27a5a5fbf46e req-0b40e5ae-8dd8-4aa3-ae93-b79d524403ad service nova] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Neutron deleted interface ca1200b2-6f64-4952-a587-f2fdb0fc14d1; detaching it from the instance and deleting it from the info cache [ 1825.789915] env[62510]: DEBUG nova.network.neutron [req-161dd70d-fa7f-43ee-a6da-27a5a5fbf46e req-0b40e5ae-8dd8-4aa3-ae93-b79d524403ad service nova] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1825.797577] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1825.797577] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52cf2d60-0865-0963-54ba-ac3657577786" [ 1825.797577] env[62510]: _type = "HttpNfcLease" [ 1825.797577] env[62510]: } is ready. {{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1825.798200] env[62510]: DEBUG oslo_vmware.rw_handles [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1825.798200] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52cf2d60-0865-0963-54ba-ac3657577786" [ 1825.798200] env[62510]: _type = "HttpNfcLease" [ 1825.798200] env[62510]: }. {{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1825.799151] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f3287de-3168-4c7a-a381-3017deed3d1f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.810676] env[62510]: DEBUG oslo_vmware.rw_handles [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523db917-a835-adf3-ef45-a82e7bbb8439/disk-0.vmdk from lease info. {{(pid=62510) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1825.810676] env[62510]: DEBUG oslo_vmware.rw_handles [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523db917-a835-adf3-ef45-a82e7bbb8439/disk-0.vmdk for reading. 
{{(pid=62510) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1825.905618] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-99bbfe29-e370-4ece-9127-23c80c4c9649 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.911862] env[62510]: DEBUG nova.network.neutron [-] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1825.933256] env[62510]: DEBUG oslo_vmware.api [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769315, 'name': Rename_Task, 'duration_secs': 0.233409} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.935939] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1825.936620] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e16bc708-2ff3-4578-85ea-75615835c902 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.949037] env[62510]: DEBUG oslo_vmware.api [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1825.949037] env[62510]: value = "task-1769318" [ 1825.949037] env[62510]: _type = "Task" [ 1825.949037] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.958717] env[62510]: DEBUG oslo_vmware.api [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769318, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.980529] env[62510]: DEBUG oslo_concurrency.lockutils [req-c8adbd0a-43a1-44ce-a04b-094c712bfa42 req-54cb7435-f821-49c3-b731-34462d7db5ea service nova] Releasing lock "refresh_cache-ebd2dc4b-8d74-47db-861e-870d41a4150b" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1826.096325] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769314, 'name': CreateVM_Task, 'duration_secs': 0.550195} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.096573] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1826.097450] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1826.097650] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1826.098324] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1826.098324] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-892fd899-9ffa-48d7-a723-e7e21ffc6db1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.105486] env[62510]: DEBUG oslo_vmware.api [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1826.105486] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5202e9f4-912e-6635-fae6-2f7c5f69c4de" [ 1826.105486] env[62510]: _type = "Task" [ 1826.105486] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.115788] env[62510]: DEBUG oslo_vmware.api [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5202e9f4-912e-6635-fae6-2f7c5f69c4de, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.167362] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dc9b7a14-8df2-4991-8107-0a2dd2111e82 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.064s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1826.170212] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 17.341s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1826.200110] env[62510]: INFO nova.scheduler.client.report [None req-dc9b7a14-8df2-4991-8107-0a2dd2111e82 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Deleted allocations for instance e7daad63-c802-4a86-bead-7e849064ed61 [ 1826.298376] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1478e001-9b83-4591-a151-8c0bb95dc44e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.311037] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d3b03e1-55b2-47af-befd-185495ff3863 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.359361] env[62510]: DEBUG nova.compute.manager [req-161dd70d-fa7f-43ee-a6da-27a5a5fbf46e req-0b40e5ae-8dd8-4aa3-ae93-b79d524403ad service nova] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Detach interface failed, port_id=ca1200b2-6f64-4952-a587-f2fdb0fc14d1, reason: Instance 9fe592c1-e23a-46d5-8952-c181709d93e7 could not be found. {{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1826.416239] env[62510]: INFO nova.compute.manager [-] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Took 1.88 seconds to deallocate network for instance. [ 1826.463680] env[62510]: DEBUG oslo_vmware.api [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769318, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.632986] env[62510]: DEBUG oslo_vmware.api [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5202e9f4-912e-6635-fae6-2f7c5f69c4de, 'name': SearchDatastore_Task, 'duration_secs': 0.018033} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.633822] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1826.633822] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1826.634948] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1826.634948] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1826.635249] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1826.635989] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-05d0d247-d30d-4f0e-82d6-798c3936081d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.650455] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1826.650654] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1826.651555] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e577e7c1-49f2-4eb5-86d7-614e2fcf7225 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.660373] env[62510]: DEBUG oslo_vmware.api [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1826.660373] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5208adbf-032e-1227-fc23-a901f66086ae" [ 1826.660373] env[62510]: _type = "Task" [ 1826.660373] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.672290] env[62510]: DEBUG oslo_vmware.api [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5208adbf-032e-1227-fc23-a901f66086ae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.714821] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dc9b7a14-8df2-4991-8107-0a2dd2111e82 tempest-ServersNegativeTestJSON-402191007 tempest-ServersNegativeTestJSON-402191007-project-member] Lock "e7daad63-c802-4a86-bead-7e849064ed61" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.591s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1826.928699] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e100115f-e0f5-4a23-adf6-107caff81f46 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1826.968011] env[62510]: DEBUG oslo_vmware.api [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769318, 'name': PowerOnVM_Task, 'duration_secs': 0.73548} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.968672] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1826.968875] env[62510]: INFO nova.compute.manager [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Took 9.39 seconds to spawn the instance on the hypervisor. 
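The entries above trace the linked-clone export path end to end: ExportVm returns an HttpNfcLease, the lease is polled until it is ready, its info is read, and the disk-0.vmdk NFC URL is opened for reading. A minimal sketch of that sequence against oslo.vmware's public session API, assuming a placeholder vCenter endpoint, credentials and VM moref (Nova itself drives these calls through oslo_vmware.rw_handles during upload_image_stream_optimized, not directly like this):

    # Sketch only: endpoint, credentials and vm_ref are placeholders.
    from oslo_vmware import api, vim_util

    def open_vmdk_export_url(vm_ref):
        session = api.VMwareAPISession(
            'vc1.example.test', 'user', 'secret',      # placeholder vCenter + credentials
            api_retry_count=10, task_poll_interval=0.5)

        # ExportVm returns an HttpNfcLease; poll it until it is "ready",
        # mirroring the "Waiting for the lease ... to be ready" entries above.
        lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)
        session.wait_for_lease_ready(lease)

        # Read HttpNfcLeaseInfo and pick the disk device URL -- the
        # "Found VMDK URL: https://<esx-host>/nfc/.../disk-0.vmdk" step.
        lease_info = session.invoke_api(
            vim_util, 'get_object_property', session.vim, lease, 'info')
        for device_url in lease_info.deviceUrl:
            if device_url.url.endswith('.vmdk'):
                return lease, device_url.url
        raise RuntimeError('lease exposed no VMDK device URL')

The caller then streams the VMDK from the returned URL (the HttpNfcLeaseProgress updates seen above keep the lease alive during that transfer) and releases the lease when done.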
[ 1826.969082] env[62510]: DEBUG nova.compute.manager [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1826.970246] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-608bbb02-bcdf-4b8c-af74-b554df8d357f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.194282] env[62510]: DEBUG oslo_vmware.api [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5208adbf-032e-1227-fc23-a901f66086ae, 'name': SearchDatastore_Task, 'duration_secs': 0.011683} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.195564] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cba04272-fdb3-4cb0-ad45-0cd6f3d36c56 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.203792] env[62510]: DEBUG oslo_vmware.api [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1827.203792] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52a235d1-7363-336f-90bc-23ceb1adf9b3" [ 1827.203792] env[62510]: _type = "Task" [ 1827.203792] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.214676] env[62510]: DEBUG oslo_vmware.api [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52a235d1-7363-336f-90bc-23ceb1adf9b3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.221728] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 8ffa27e9-6a3b-48d1-aed4-c808089788d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1827.222222] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 4e735bb6-f167-4c2b-b44e-d2dd3040603d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1827.222222] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1827.222400] env[62510]: WARNING nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 241d842d-3dd5-4ac2-a18a-12b9c9fbd340 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1827.222572] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 841460b0-d917-44ea-88c6-0e5a3022f658 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1827.222804] env[62510]: WARNING nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance c8e69231-2786-47ac-9a44-c194088b8079 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1827.223026] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance bc474f8b-dd3b-4d7a-a8e0-fea5570b3091 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1827.223234] env[62510]: WARNING nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 91a76cc7-7f82-42cf-a379-fc0ba3d04568 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1827.223402] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 2f7b02e8-f658-448f-b6e6-9bfa94c74da4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1827.223707] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 8a230335-6388-45fb-a29e-9e63ddb4d5f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1827.223707] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 31772dc9-4f04-42df-9e3b-3200cc72c977 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1827.223858] env[62510]: WARNING nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 1e3e2044-a072-454f-85ba-5cb0bc36b5fd is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1827.224043] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 9fe592c1-e23a-46d5-8952-c181709d93e7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1827.224176] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 313f7916-0737-4e44-ae2f-58301934bf06 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1827.224302] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance cf4160a8-1160-45fc-b9e5-e9526b6c1506 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1827.224403] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance ebd2dc4b-8d74-47db-861e-870d41a4150b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1827.503448] env[62510]: INFO nova.compute.manager [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Took 25.92 seconds to build instance. [ 1827.714714] env[62510]: DEBUG oslo_vmware.api [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52a235d1-7363-336f-90bc-23ceb1adf9b3, 'name': SearchDatastore_Task, 'duration_secs': 0.013162} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.715108] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1827.715278] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] ebd2dc4b-8d74-47db-861e-870d41a4150b/ebd2dc4b-8d74-47db-861e-870d41a4150b.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1827.715555] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e4157c91-da93-4658-80de-8a4bf6e2a0d5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.727621] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 01204162-bf8e-46e0-bcf4-00df9ed7e7ce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1827.733432] env[62510]: DEBUG oslo_vmware.api [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1827.733432] env[62510]: value = "task-1769319" [ 1827.733432] env[62510]: _type = "Task" [ 1827.733432] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.743852] env[62510]: DEBUG oslo_vmware.api [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769319, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.006748] env[62510]: DEBUG oslo_concurrency.lockutils [None req-66d68647-e998-41a7-87fc-3dd2f31573c5 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lock "cf4160a8-1160-45fc-b9e5-e9526b6c1506" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.439s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1828.238029] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance e7e053be-cb88-4ae0-b157-3006211f77d9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1828.257322] env[62510]: DEBUG oslo_vmware.api [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769319, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.362168] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-d67d8b0a-dffc-4f7e-b0e2-3f3d83bddcc7 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Volume attach. Driver type: vmdk {{(pid=62510) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1828.362168] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-d67d8b0a-dffc-4f7e-b0e2-3f3d83bddcc7 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367443', 'volume_id': 'b844eb5c-7644-4bb5-900e-d0a16620fbe8', 'name': 'volume-b844eb5c-7644-4bb5-900e-d0a16620fbe8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bc474f8b-dd3b-4d7a-a8e0-fea5570b3091', 'attached_at': '', 'detached_at': '', 'volume_id': 'b844eb5c-7644-4bb5-900e-d0a16620fbe8', 'serial': 'b844eb5c-7644-4bb5-900e-d0a16620fbe8'} {{(pid=62510) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1828.362168] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-485aff6e-403b-4dee-b382-3f054ad588e9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.386810] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f98f25d5-8432-48af-9c9e-c2f73ee17788 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.421739] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-d67d8b0a-dffc-4f7e-b0e2-3f3d83bddcc7 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] volume-b844eb5c-7644-4bb5-900e-d0a16620fbe8/volume-b844eb5c-7644-4bb5-900e-d0a16620fbe8.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1828.422151] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-63a0e74b-199b-4b87-a08b-927f4b246e54 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.446749] env[62510]: DEBUG oslo_vmware.api [None req-d67d8b0a-dffc-4f7e-b0e2-3f3d83bddcc7 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1828.446749] env[62510]: value = "task-1769320" [ 1828.446749] env[62510]: _type = "Task" [ 1828.446749] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.457427] env[62510]: DEBUG oslo_vmware.api [None req-d67d8b0a-dffc-4f7e-b0e2-3f3d83bddcc7 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769320, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.753018] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 2e24b76d-a770-4f1e-a8f1-a54417f1be81 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1828.754394] env[62510]: DEBUG oslo_vmware.api [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769319, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.793237] env[62510]: DEBUG nova.compute.manager [req-1f56e249-78bd-4925-90a3-c9b63cf1e1f2 req-8b9d0ea3-60c9-4595-8fef-f4f8adb53239 service nova] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Received event network-changed-022a0379-8a0f-412f-a55a-f8fcaf1102f3 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1828.793492] env[62510]: DEBUG nova.compute.manager [req-1f56e249-78bd-4925-90a3-c9b63cf1e1f2 req-8b9d0ea3-60c9-4595-8fef-f4f8adb53239 service nova] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Refreshing instance network info cache due to event network-changed-022a0379-8a0f-412f-a55a-f8fcaf1102f3. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1828.793783] env[62510]: DEBUG oslo_concurrency.lockutils [req-1f56e249-78bd-4925-90a3-c9b63cf1e1f2 req-8b9d0ea3-60c9-4595-8fef-f4f8adb53239 service nova] Acquiring lock "refresh_cache-cf4160a8-1160-45fc-b9e5-e9526b6c1506" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1828.794030] env[62510]: DEBUG oslo_concurrency.lockutils [req-1f56e249-78bd-4925-90a3-c9b63cf1e1f2 req-8b9d0ea3-60c9-4595-8fef-f4f8adb53239 service nova] Acquired lock "refresh_cache-cf4160a8-1160-45fc-b9e5-e9526b6c1506" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1828.794410] env[62510]: DEBUG nova.network.neutron [req-1f56e249-78bd-4925-90a3-c9b63cf1e1f2 req-8b9d0ea3-60c9-4595-8fef-f4f8adb53239 service nova] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Refreshing network info cache for port 022a0379-8a0f-412f-a55a-f8fcaf1102f3 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1828.958965] env[62510]: DEBUG oslo_vmware.api [None req-d67d8b0a-dffc-4f7e-b0e2-3f3d83bddcc7 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769320, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.254014] env[62510]: DEBUG oslo_vmware.api [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769319, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.286703} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1829.254390] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] ebd2dc4b-8d74-47db-861e-870d41a4150b/ebd2dc4b-8d74-47db-861e-870d41a4150b.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1829.254676] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1829.254999] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-40e0184e-4cd6-4132-8b63-6d280bf70c17 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.258619] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 5f229f78-6c5d-4170-bdd4-c5522b137949 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1829.263347] env[62510]: DEBUG oslo_vmware.api [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1829.263347] env[62510]: value = "task-1769321" [ 1829.263347] env[62510]: _type = "Task" [ 1829.263347] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1829.276062] env[62510]: DEBUG oslo_vmware.api [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769321, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.459207] env[62510]: DEBUG oslo_vmware.api [None req-d67d8b0a-dffc-4f7e-b0e2-3f3d83bddcc7 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769320, 'name': ReconfigVM_Task, 'duration_secs': 0.657629} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1829.459573] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-d67d8b0a-dffc-4f7e-b0e2-3f3d83bddcc7 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Reconfigured VM instance instance-0000004e to attach disk [datastore1] volume-b844eb5c-7644-4bb5-900e-d0a16620fbe8/volume-b844eb5c-7644-4bb5-900e-d0a16620fbe8.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1829.465016] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-96b0f0fd-06d7-42b7-a507-96074be0cf88 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.483214] env[62510]: DEBUG oslo_vmware.api [None req-d67d8b0a-dffc-4f7e-b0e2-3f3d83bddcc7 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1829.483214] env[62510]: value = "task-1769322" [ 1829.483214] env[62510]: _type = "Task" [ 1829.483214] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1829.497688] env[62510]: DEBUG oslo_vmware.api [None req-d67d8b0a-dffc-4f7e-b0e2-3f3d83bddcc7 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769322, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.588875] env[62510]: DEBUG nova.network.neutron [req-1f56e249-78bd-4925-90a3-c9b63cf1e1f2 req-8b9d0ea3-60c9-4595-8fef-f4f8adb53239 service nova] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Updated VIF entry in instance network info cache for port 022a0379-8a0f-412f-a55a-f8fcaf1102f3. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1829.588875] env[62510]: DEBUG nova.network.neutron [req-1f56e249-78bd-4925-90a3-c9b63cf1e1f2 req-8b9d0ea3-60c9-4595-8fef-f4f8adb53239 service nova] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Updating instance_info_cache with network_info: [{"id": "022a0379-8a0f-412f-a55a-f8fcaf1102f3", "address": "fa:16:3e:fc:f0:87", "network": {"id": "3958d418-1b64-4598-975c-02b13c976ce5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1692593298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3109fa7889c64dfda2117d4cd58aa528", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ba56987-9dc3-4c76-a4e2-942b05355bdb", "external-id": "nsx-vlan-transportzone-698", "segmentation_id": 698, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap022a0379-8a", "ovs_interfaceid": "022a0379-8a0f-412f-a55a-f8fcaf1102f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1829.762066] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1829.762414] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Total usable vcpus: 48, total allocated vcpus: 12 {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1829.762528] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2816MB phys_disk=200GB used_disk=12GB total_vcpus=48 used_vcpus=12 pci_stats=[] {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1829.774938] env[62510]: DEBUG oslo_vmware.api [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769321, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086896} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1829.775281] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1829.777178] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7911f39-3c3d-497e-9997-e9de61c749ce {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.800127] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] ebd2dc4b-8d74-47db-861e-870d41a4150b/ebd2dc4b-8d74-47db-861e-870d41a4150b.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1829.803044] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d711b2c6-954f-4c5f-a916-621ab1cd5a69 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.824919] env[62510]: DEBUG oslo_vmware.api [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1829.824919] env[62510]: value = "task-1769323" [ 1829.824919] env[62510]: _type = "Task" [ 1829.824919] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1829.838690] env[62510]: DEBUG oslo_vmware.api [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769323, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.000612] env[62510]: DEBUG oslo_vmware.api [None req-d67d8b0a-dffc-4f7e-b0e2-3f3d83bddcc7 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769322, 'name': ReconfigVM_Task, 'duration_secs': 0.184452} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1830.000944] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-d67d8b0a-dffc-4f7e-b0e2-3f3d83bddcc7 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367443', 'volume_id': 'b844eb5c-7644-4bb5-900e-d0a16620fbe8', 'name': 'volume-b844eb5c-7644-4bb5-900e-d0a16620fbe8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bc474f8b-dd3b-4d7a-a8e0-fea5570b3091', 'attached_at': '', 'detached_at': '', 'volume_id': 'b844eb5c-7644-4bb5-900e-d0a16620fbe8', 'serial': 'b844eb5c-7644-4bb5-900e-d0a16620fbe8'} {{(pid=62510) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1830.093792] env[62510]: DEBUG oslo_concurrency.lockutils [req-1f56e249-78bd-4925-90a3-c9b63cf1e1f2 req-8b9d0ea3-60c9-4595-8fef-f4f8adb53239 service nova] Releasing lock "refresh_cache-cf4160a8-1160-45fc-b9e5-e9526b6c1506" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1830.132399] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feb2d4ac-c951-4fc8-8a51-146712b113a1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.142148] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c9110cc-9307-4daf-a087-89a48e964dcd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.185622] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a8572f6-8eed-43b2-bd17-4c1016fcee41 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.194966] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75399ce9-ed6e-47f9-923c-8d4d3b156941 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.212122] env[62510]: DEBUG nova.compute.provider_tree [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1830.336349] env[62510]: DEBUG oslo_vmware.api [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769323, 'name': ReconfigVM_Task, 'duration_secs': 0.45165} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1830.336349] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Reconfigured VM instance instance-0000005d to attach disk [datastore1] ebd2dc4b-8d74-47db-861e-870d41a4150b/ebd2dc4b-8d74-47db-861e-870d41a4150b.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1830.337739] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7acc33cb-6fd3-4426-a14d-c27521deaeb2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.353187] env[62510]: DEBUG oslo_vmware.api [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1830.353187] env[62510]: value = "task-1769324" [ 1830.353187] env[62510]: _type = "Task" [ 1830.353187] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1830.363285] env[62510]: DEBUG oslo_vmware.api [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769324, 'name': Rename_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.718111] env[62510]: DEBUG nova.scheduler.client.report [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1830.862976] env[62510]: DEBUG oslo_vmware.api [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769324, 'name': Rename_Task, 'duration_secs': 0.220186} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1830.863593] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1830.863988] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f27ab46e-596f-4401-a9b7-3540c9f4b7b3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.872637] env[62510]: DEBUG oslo_vmware.api [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1830.872637] env[62510]: value = "task-1769325" [ 1830.872637] env[62510]: _type = "Task" [ 1830.872637] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1830.882471] env[62510]: DEBUG oslo_vmware.api [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769325, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.048470] env[62510]: DEBUG nova.objects.instance [None req-d67d8b0a-dffc-4f7e-b0e2-3f3d83bddcc7 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lazy-loading 'flavor' on Instance uuid bc474f8b-dd3b-4d7a-a8e0-fea5570b3091 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1831.227021] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62510) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1831.227021] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 5.055s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1831.227021] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24db7559-1068-4303-9adc-d721ae9baeeb tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.253s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1831.227021] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24db7559-1068-4303-9adc-d721ae9baeeb tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1831.227896] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 
tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.189s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1831.230619] env[62510]: INFO nova.compute.claims [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1831.234246] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1831.234563] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Cleaning up deleted instances {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11609}} [ 1831.260669] env[62510]: INFO nova.scheduler.client.report [None req-24db7559-1068-4303-9adc-d721ae9baeeb tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Deleted allocations for instance 91a76cc7-7f82-42cf-a379-fc0ba3d04568 [ 1831.347250] env[62510]: DEBUG oslo_vmware.rw_handles [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523ab937-0c52-0e93-4c57-65560e4d1981/disk-0.vmdk. {{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1831.349240] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e953f73-5cb7-4d12-ab4b-cf9624d75cef {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.357692] env[62510]: DEBUG oslo_vmware.rw_handles [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523ab937-0c52-0e93-4c57-65560e4d1981/disk-0.vmdk is in state: ready. {{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1831.357692] env[62510]: ERROR oslo_vmware.rw_handles [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523ab937-0c52-0e93-4c57-65560e4d1981/disk-0.vmdk due to incomplete transfer. 
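The lease handling above ends differently from a clean export: the read handle finds the HttpNfcLease still in state "ready" but the transfer incomplete, so it aborts the lease rather than completing it before the export VM is destroyed. A simplified sketch of that decision, reusing the session and lease objects from the previous sketch and treating bytes_read/expected_size as illustrative placeholders (the real accounting lives inside oslo_vmware.rw_handles):

    # Sketch of the release decision logged above; bytes_read/expected_size
    # stand in for the handle's own transfer bookkeeping.
    from oslo_vmware import vim_util

    def release_export_lease(session, lease, bytes_read, expected_size):
        state = session.invoke_api(
            vim_util, 'get_object_property', session.vim, lease, 'state')
        if state == 'ready':
            if bytes_read == expected_size:
                # Normal path: tell vCenter the NFC transfer finished cleanly.
                session.invoke_api(session.vim, 'HttpNfcLeaseComplete', lease)
            else:
                # Matches the "Aborting lease ... due to incomplete transfer"
                # ERROR entry above.
                session.invoke_api(session.vim, 'HttpNfcLeaseAbort', lease)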
[ 1831.357944] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-d3bf7a14-d09f-4d66-b630-b52b82f776b4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.370145] env[62510]: DEBUG oslo_vmware.rw_handles [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523ab937-0c52-0e93-4c57-65560e4d1981/disk-0.vmdk. {{(pid=62510) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1831.370145] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Uploaded image f76254e3-6418-4890-b818-183d24e89b62 to the Glance image server {{(pid=62510) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1831.372571] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Destroying the VM {{(pid=62510) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1831.372898] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-51852cea-06a8-40e4-b256-527464b580f2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.386744] env[62510]: DEBUG oslo_vmware.api [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769325, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.388954] env[62510]: DEBUG oslo_vmware.api [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 1831.388954] env[62510]: value = "task-1769326" [ 1831.388954] env[62510]: _type = "Task" [ 1831.388954] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1831.401760] env[62510]: DEBUG oslo_vmware.api [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769326, 'name': Destroy_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.554509] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d67d8b0a-dffc-4f7e-b0e2-3f3d83bddcc7 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "bc474f8b-dd3b-4d7a-a8e0-fea5570b3091" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 10.348s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1831.763664] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] There are 51 instances to clean {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11618}} [ 1831.767184] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 1dc9e3b6-5e75-49b4-aef0-01200fb9be47] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1831.773768] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24db7559-1068-4303-9adc-d721ae9baeeb tempest-ImagesOneServerTestJSON-1086547322 tempest-ImagesOneServerTestJSON-1086547322-project-member] Lock "91a76cc7-7f82-42cf-a379-fc0ba3d04568" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.478s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1831.885667] env[62510]: DEBUG oslo_vmware.api [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769325, 'name': PowerOnVM_Task, 'duration_secs': 0.707007} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1831.886436] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1831.887040] env[62510]: INFO nova.compute.manager [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Took 9.78 seconds to spawn the instance on the hypervisor. [ 1831.887467] env[62510]: DEBUG nova.compute.manager [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1831.888671] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-323ebee8-e6fd-4952-91ad-d006f22a7cf8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.904113] env[62510]: DEBUG oslo_vmware.api [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769326, 'name': Destroy_Task} progress is 33%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.268552] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: f40078f0-af6b-480b-96e6-4117022c87e2] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1832.414490] env[62510]: DEBUG oslo_vmware.api [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769326, 'name': Destroy_Task} progress is 33%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.420792] env[62510]: INFO nova.compute.manager [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Took 28.95 seconds to build instance. [ 1832.482888] env[62510]: INFO nova.compute.manager [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Rebuilding instance [ 1832.551555] env[62510]: DEBUG nova.compute.manager [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1832.552535] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-854baa74-ff62-4159-b59f-512ed8669ee9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.599342] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ca80c4b-e0fb-4f73-94a4-5c06b00659bb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.608617] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09e44c83-44df-474a-828d-99b112b73392 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.646331] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82bc021b-1f3a-4f4a-8096-8d8af0900f86 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.655928] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02a8dc17-470d-4a94-a08f-d940c0682b55 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.671593] env[62510]: DEBUG nova.compute.provider_tree [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1832.780050] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 
9373089f-dbd4-4ac9-8736-e4c929fe6fb0] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1832.903226] env[62510]: DEBUG oslo_vmware.api [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769326, 'name': Destroy_Task, 'duration_secs': 1.244269} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.904344] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Destroyed the VM [ 1832.904344] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Deleting Snapshot of the VM instance {{(pid=62510) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1832.904344] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-cc581f27-0a46-4261-830b-f84fb1068656 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.913405] env[62510]: DEBUG oslo_vmware.api [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 1832.913405] env[62510]: value = "task-1769327" [ 1832.913405] env[62510]: _type = "Task" [ 1832.913405] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1832.927849] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d15ab74b-0fd3-4a92-bc1a-6591b9f4f632 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "ebd2dc4b-8d74-47db-861e-870d41a4150b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.473s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1832.928409] env[62510]: DEBUG oslo_vmware.api [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769327, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.174349] env[62510]: DEBUG nova.scheduler.client.report [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1833.284036] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: dabc046f-10f5-43d8-90f8-507dcb4d0144] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1833.436039] env[62510]: DEBUG oslo_vmware.api [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769327, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.576038] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1833.576038] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ac23afff-8472-451f-87ab-8ea5bd62d089 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.584907] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1833.584907] env[62510]: value = "task-1769328" [ 1833.584907] env[62510]: _type = "Task" [ 1833.584907] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1833.597116] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769328, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.681721] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.454s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1833.682102] env[62510]: DEBUG nova.compute.manager [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1833.684976] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.816s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1833.686874] env[62510]: INFO nova.compute.claims [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1833.785689] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 8b079310-084b-4ba0-8a82-57d64f421c11] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1833.932398] env[62510]: DEBUG oslo_vmware.api [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769327, 'name': RemoveSnapshot_Task, 'duration_secs': 0.642766} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1833.932762] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Deleted Snapshot of the VM instance {{(pid=62510) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1833.932762] env[62510]: INFO nova.compute.manager [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Took 15.35 seconds to snapshot the instance on the hypervisor. [ 1834.097872] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769328, 'name': PowerOffVM_Task, 'duration_secs': 0.341509} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1834.100033] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1834.177113] env[62510]: INFO nova.compute.manager [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Detaching volume b844eb5c-7644-4bb5-900e-d0a16620fbe8 [ 1834.195865] env[62510]: DEBUG nova.compute.utils [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1834.198209] env[62510]: DEBUG nova.compute.manager [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1834.198400] env[62510]: DEBUG nova.network.neutron [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1834.229704] env[62510]: INFO nova.virt.block_device [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Attempting to driver detach volume b844eb5c-7644-4bb5-900e-d0a16620fbe8 from mountpoint /dev/sdb [ 1834.230040] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Volume detach. 
Driver type: vmdk {{(pid=62510) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1834.230236] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367443', 'volume_id': 'b844eb5c-7644-4bb5-900e-d0a16620fbe8', 'name': 'volume-b844eb5c-7644-4bb5-900e-d0a16620fbe8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bc474f8b-dd3b-4d7a-a8e0-fea5570b3091', 'attached_at': '', 'detached_at': '', 'volume_id': 'b844eb5c-7644-4bb5-900e-d0a16620fbe8', 'serial': 'b844eb5c-7644-4bb5-900e-d0a16620fbe8'} {{(pid=62510) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1834.231444] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03c51191-5fa3-45e8-a0e8-38464b7676bf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.262822] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fcb1284-43b1-4e3a-aabd-0e23510b0d7e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.272907] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7a76a89-c95a-40ff-89e8-cd3dbefd4884 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.297384] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 144052ab-e3e7-401f-9edb-d8088780e468] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1834.302269] env[62510]: DEBUG nova.policy [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '82a0a2fa8df14cbb9f8b7fbd19ef4a4e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '69511bceaf9c432c8819574d05584f09', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1834.304713] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f8ef38b-9091-48ab-b6a9-71936e39c03e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.326609] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] The volume has not been displaced from its original location: [datastore1] volume-b844eb5c-7644-4bb5-900e-d0a16620fbe8/volume-b844eb5c-7644-4bb5-900e-d0a16620fbe8.vmdk. No consolidation needed. 
{{(pid=62510) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1834.333955] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Reconfiguring VM instance instance-0000004e to detach disk 2001 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1834.335725] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fce20b7e-75f5-44cb-adc4-b14908bb8169 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.362112] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1834.362112] env[62510]: value = "task-1769329" [ 1834.362112] env[62510]: _type = "Task" [ 1834.362112] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1834.375721] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769329, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.500710] env[62510]: DEBUG nova.compute.manager [None req-90d908c5-b958-434c-a6be-2ce1630346d3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Found 2 images (rotation: 2) {{(pid=62510) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4924}} [ 1834.569854] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0de3ec7b-ac20-44d7-92d2-c771538d1888 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "ebd2dc4b-8d74-47db-861e-870d41a4150b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1834.570176] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0de3ec7b-ac20-44d7-92d2-c771538d1888 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "ebd2dc4b-8d74-47db-861e-870d41a4150b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1834.571027] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0de3ec7b-ac20-44d7-92d2-c771538d1888 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "ebd2dc4b-8d74-47db-861e-870d41a4150b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1834.571027] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0de3ec7b-ac20-44d7-92d2-c771538d1888 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock 
"ebd2dc4b-8d74-47db-861e-870d41a4150b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1834.571027] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0de3ec7b-ac20-44d7-92d2-c771538d1888 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "ebd2dc4b-8d74-47db-861e-870d41a4150b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1834.575988] env[62510]: INFO nova.compute.manager [None req-0de3ec7b-ac20-44d7-92d2-c771538d1888 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Terminating instance [ 1834.668536] env[62510]: DEBUG nova.network.neutron [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Successfully created port: 53f991bc-ec2f-434b-8943-f8e6d891b608 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1834.702245] env[62510]: DEBUG nova.compute.manager [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1834.802298] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 774ea198-c933-449a-8380-2e4cc9327389] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1834.876087] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769329, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.921125] env[62510]: DEBUG oslo_vmware.rw_handles [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523db917-a835-adf3-ef45-a82e7bbb8439/disk-0.vmdk. {{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1834.922456] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7efb497e-7b88-47c3-beca-4f43cc5688ca {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.932718] env[62510]: DEBUG oslo_vmware.rw_handles [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523db917-a835-adf3-ef45-a82e7bbb8439/disk-0.vmdk is in state: ready. 
{{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1834.933018] env[62510]: ERROR oslo_vmware.rw_handles [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523db917-a835-adf3-ef45-a82e7bbb8439/disk-0.vmdk due to incomplete transfer. [ 1834.933167] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-624e1581-956d-49f1-84c2-05dfb4c2e43b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.944418] env[62510]: DEBUG oslo_vmware.rw_handles [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523db917-a835-adf3-ef45-a82e7bbb8439/disk-0.vmdk. {{(pid=62510) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1834.944705] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Uploaded image 3fe87b83-b1ae-4d1f-8939-42b9e3d5d598 to the Glance image server {{(pid=62510) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1834.946397] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Destroying the VM {{(pid=62510) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1834.946681] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-54846027-d5ec-42e6-b54b-cf3b6ca5e95f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.955821] env[62510]: DEBUG oslo_vmware.api [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1834.955821] env[62510]: value = "task-1769330" [ 1834.955821] env[62510]: _type = "Task" [ 1834.955821] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1834.965888] env[62510]: DEBUG oslo_vmware.api [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769330, 'name': Destroy_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.054408] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69df74a3-3048-47c8-9585-af90e1b4f465 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.063102] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f023be1e-a72f-413d-a72f-db8879ff3027 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.100068] env[62510]: DEBUG nova.compute.manager [None req-0de3ec7b-ac20-44d7-92d2-c771538d1888 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1835.100442] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0de3ec7b-ac20-44d7-92d2-c771538d1888 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1835.101875] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc1f231e-b994-426a-b436-45845ac452ea {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.105965] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff1efd2e-5396-49e1-8edf-7e24e421907f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.117201] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9c36b0b-60b3-45ba-a3d2-9b764fc66662 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.121906] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-0de3ec7b-ac20-44d7-92d2-c771538d1888 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1835.122568] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e1624c01-7a19-4115-b1d8-968cc8950be8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.138638] env[62510]: DEBUG nova.compute.provider_tree [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1835.144298] env[62510]: DEBUG oslo_vmware.api [None req-0de3ec7b-ac20-44d7-92d2-c771538d1888 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1835.144298] env[62510]: value = "task-1769331" [ 1835.144298] env[62510]: _type = "Task" [ 1835.144298] env[62510]: } to 
complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1835.154293] env[62510]: DEBUG oslo_vmware.api [None req-0de3ec7b-ac20-44d7-92d2-c771538d1888 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769331, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.241340] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Acquiring lock "92cb4e54-a00e-4974-b134-22d302932e32" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1835.241590] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lock "92cb4e54-a00e-4974-b134-22d302932e32" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1835.309201] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 77f485ae-9c4c-424e-8bac-6d023e428767] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1835.379575] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769329, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.467030] env[62510]: DEBUG oslo_vmware.api [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769330, 'name': Destroy_Task} progress is 33%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.644044] env[62510]: DEBUG nova.compute.manager [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1835.644044] env[62510]: DEBUG nova.scheduler.client.report [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1835.647107] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e0613f1-2778-4258-861e-b12ae4d88719 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.666639] env[62510]: DEBUG oslo_vmware.api [None req-0de3ec7b-ac20-44d7-92d2-c771538d1888 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769331, 'name': PowerOffVM_Task, 'duration_secs': 0.216282} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1835.667614] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-0de3ec7b-ac20-44d7-92d2-c771538d1888 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1835.667792] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0de3ec7b-ac20-44d7-92d2-c771538d1888 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1835.668060] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0ff66c91-68f2-4416-8921-92e4465a511c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.717041] env[62510]: DEBUG nova.compute.manager [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1835.740602] env[62510]: DEBUG nova.virt.hardware [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1835.740851] env[62510]: DEBUG nova.virt.hardware [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1835.741013] env[62510]: DEBUG nova.virt.hardware [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1835.741214] env[62510]: DEBUG nova.virt.hardware [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1835.741362] env[62510]: DEBUG nova.virt.hardware [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1835.741531] env[62510]: DEBUG nova.virt.hardware [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1835.741731] env[62510]: DEBUG nova.virt.hardware [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1835.741880] env[62510]: DEBUG nova.virt.hardware [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1835.742060] env[62510]: DEBUG nova.virt.hardware [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1835.742224] env[62510]: DEBUG nova.virt.hardware [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1835.742395] env[62510]: DEBUG nova.virt.hardware [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1835.743257] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc49c3e4-0e3a-4129-b27f-2dd5f7ec7aa1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.745959] env[62510]: DEBUG nova.compute.manager [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1835.754499] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b0a19a6-07e8-4b3a-9a1e-13f42ae7cefc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.813623] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 2dce738b-9624-4a74-8b8c-042e45b693b0] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1835.834608] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0de3ec7b-ac20-44d7-92d2-c771538d1888 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1835.835386] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0de3ec7b-ac20-44d7-92d2-c771538d1888 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1835.835386] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-0de3ec7b-ac20-44d7-92d2-c771538d1888 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Deleting the datastore file [datastore1] ebd2dc4b-8d74-47db-861e-870d41a4150b {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1835.835386] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task 
with opID=oslo.vmware-bb26e2f5-5366-4bc4-a722-2d1e0d16af5e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.843819] env[62510]: DEBUG oslo_vmware.api [None req-0de3ec7b-ac20-44d7-92d2-c771538d1888 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for the task: (returnval){ [ 1835.843819] env[62510]: value = "task-1769333" [ 1835.843819] env[62510]: _type = "Task" [ 1835.843819] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1835.852731] env[62510]: DEBUG oslo_vmware.api [None req-0de3ec7b-ac20-44d7-92d2-c771538d1888 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769333, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.877310] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769329, 'name': ReconfigVM_Task, 'duration_secs': 1.290463} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1835.877664] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Reconfigured VM instance instance-0000004e to detach disk 2001 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1835.883021] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5b5c83c1-ff97-45f7-9f0d-2a1a5222caa1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.899238] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1835.899238] env[62510]: value = "task-1769334" [ 1835.899238] env[62510]: _type = "Task" [ 1835.899238] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1835.907979] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769334, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.967802] env[62510]: DEBUG oslo_vmware.api [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769330, 'name': Destroy_Task} progress is 100%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.149198] env[62510]: DEBUG nova.compute.manager [req-53b77b07-3ccb-4e5b-87b0-035988879a80 req-537c49ef-7796-4ba8-82be-ee3b975ac309 service nova] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Received event network-vif-plugged-53f991bc-ec2f-434b-8943-f8e6d891b608 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1836.149198] env[62510]: DEBUG oslo_concurrency.lockutils [req-53b77b07-3ccb-4e5b-87b0-035988879a80 req-537c49ef-7796-4ba8-82be-ee3b975ac309 service nova] Acquiring lock "01204162-bf8e-46e0-bcf4-00df9ed7e7ce-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.149828] env[62510]: DEBUG oslo_concurrency.lockutils [req-53b77b07-3ccb-4e5b-87b0-035988879a80 req-537c49ef-7796-4ba8-82be-ee3b975ac309 service nova] Lock "01204162-bf8e-46e0-bcf4-00df9ed7e7ce-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1836.150154] env[62510]: DEBUG oslo_concurrency.lockutils [req-53b77b07-3ccb-4e5b-87b0-035988879a80 req-537c49ef-7796-4ba8-82be-ee3b975ac309 service nova] Lock "01204162-bf8e-46e0-bcf4-00df9ed7e7ce-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1836.150461] env[62510]: DEBUG nova.compute.manager [req-53b77b07-3ccb-4e5b-87b0-035988879a80 req-537c49ef-7796-4ba8-82be-ee3b975ac309 service nova] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] No waiting events found dispatching network-vif-plugged-53f991bc-ec2f-434b-8943-f8e6d891b608 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1836.150745] env[62510]: WARNING nova.compute.manager [req-53b77b07-3ccb-4e5b-87b0-035988879a80 req-537c49ef-7796-4ba8-82be-ee3b975ac309 service nova] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Received unexpected event network-vif-plugged-53f991bc-ec2f-434b-8943-f8e6d891b608 for instance with vm_state building and task_state spawning. [ 1836.151943] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.467s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1836.152517] env[62510]: DEBUG nova.compute.manager [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1836.155039] env[62510]: DEBUG oslo_concurrency.lockutils [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 23.116s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1836.159353] env[62510]: DEBUG nova.objects.instance [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62510) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1836.168283] env[62510]: INFO nova.compute.manager [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] instance snapshotting [ 1836.169044] env[62510]: DEBUG nova.objects.instance [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lazy-loading 'flavor' on Instance uuid 841460b0-d917-44ea-88c6-0e5a3022f658 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1836.245816] env[62510]: DEBUG nova.network.neutron [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Successfully updated port: 53f991bc-ec2f-434b-8943-f8e6d891b608 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1836.269311] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.318182] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: b7ffe11f-2f63-419b-9ad8-0a89a05d201c] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1836.355473] env[62510]: DEBUG oslo_vmware.api [None req-0de3ec7b-ac20-44d7-92d2-c771538d1888 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Task: {'id': task-1769333, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141738} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1836.355968] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-0de3ec7b-ac20-44d7-92d2-c771538d1888 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1836.356174] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0de3ec7b-ac20-44d7-92d2-c771538d1888 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1836.356354] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0de3ec7b-ac20-44d7-92d2-c771538d1888 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1836.356526] env[62510]: INFO nova.compute.manager [None req-0de3ec7b-ac20-44d7-92d2-c771538d1888 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Took 1.26 seconds to destroy the instance on the hypervisor. [ 1836.356767] env[62510]: DEBUG oslo.service.loopingcall [None req-0de3ec7b-ac20-44d7-92d2-c771538d1888 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1836.356960] env[62510]: DEBUG nova.compute.manager [-] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1836.357069] env[62510]: DEBUG nova.network.neutron [-] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1836.415603] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769334, 'name': ReconfigVM_Task, 'duration_secs': 0.332735} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1836.415937] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367443', 'volume_id': 'b844eb5c-7644-4bb5-900e-d0a16620fbe8', 'name': 'volume-b844eb5c-7644-4bb5-900e-d0a16620fbe8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bc474f8b-dd3b-4d7a-a8e0-fea5570b3091', 'attached_at': '', 'detached_at': '', 'volume_id': 'b844eb5c-7644-4bb5-900e-d0a16620fbe8', 'serial': 'b844eb5c-7644-4bb5-900e-d0a16620fbe8'} {{(pid=62510) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1836.467865] env[62510]: DEBUG oslo_vmware.api [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769330, 'name': Destroy_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.665670] env[62510]: DEBUG nova.compute.utils [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1836.669305] env[62510]: DEBUG nova.compute.manager [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1836.669721] env[62510]: DEBUG nova.network.neutron [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1836.674957] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33396daf-23b2-45ee-9e00-0f0300efae7f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.697721] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-078b310e-9927-4220-b37c-bc6bfa671e04 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.723243] env[62510]: DEBUG nova.policy [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '97a7f1ca55d549a3985e95b6bbc665f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '94a46473611d4b22be7c66c909d1b348', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1836.753898] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Acquiring lock "refresh_cache-01204162-bf8e-46e0-bcf4-00df9ed7e7ce" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1836.754070] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Acquired lock "refresh_cache-01204162-bf8e-46e0-bcf4-00df9ed7e7ce" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1836.754230] env[62510]: DEBUG nova.network.neutron [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1836.821121] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 1d644c4f-1fd4-4251-aeef-5777d3f4b94c] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1836.971722] env[62510]: DEBUG oslo_vmware.api [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769330, 'name': Destroy_Task, 'duration_secs': 1.580907} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1836.974566] env[62510]: DEBUG nova.network.neutron [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Successfully created port: b9d1d288-41c0-4355-a940-4e80836ad286 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1836.977510] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Destroyed the VM [ 1836.977763] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Deleting Snapshot of the VM instance {{(pid=62510) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1836.979573] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-ed1de4b4-e8a3-44b2-a53f-8bb335c9f53a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.991020] env[62510]: DEBUG oslo_vmware.api [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1836.991020] env[62510]: value = "task-1769335" [ 1836.991020] env[62510]: _type = "Task" [ 1836.991020] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.001113] env[62510]: DEBUG oslo_vmware.api [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769335, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.103060] env[62510]: DEBUG nova.network.neutron [-] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1837.169748] env[62510]: DEBUG nova.compute.manager [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Start building block device mappings for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1837.174155] env[62510]: DEBUG oslo_concurrency.lockutils [None req-710e3bd6-ebba-43ff-8f26-9d6aa80e1f79 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.019s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1837.174831] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d9a3169f-07d0-46bb-9449-1c81a3e49604 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.220s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1837.174831] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d9a3169f-07d0-46bb-9449-1c81a3e49604 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1837.181643] env[62510]: DEBUG oslo_concurrency.lockutils [None req-546d308d-e331-408c-99ff-364b34efab00 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.851s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1837.181841] env[62510]: DEBUG oslo_concurrency.lockutils [None req-546d308d-e331-408c-99ff-364b34efab00 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1837.183714] env[62510]: DEBUG oslo_concurrency.lockutils [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.875s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1837.187563] env[62510]: INFO nova.compute.claims [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1837.209140] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Creating Snapshot of the VM instance {{(pid=62510) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1837.211132] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-144159e6-dee7-4224-97a7-73fb6f887b70 
{{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.218428] env[62510]: INFO nova.scheduler.client.report [None req-546d308d-e331-408c-99ff-364b34efab00 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Deleted allocations for instance 1e3e2044-a072-454f-85ba-5cb0bc36b5fd [ 1837.223607] env[62510]: INFO nova.scheduler.client.report [None req-d9a3169f-07d0-46bb-9449-1c81a3e49604 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Deleted allocations for instance c8e69231-2786-47ac-9a44-c194088b8079 [ 1837.226396] env[62510]: DEBUG oslo_vmware.api [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 1837.226396] env[62510]: value = "task-1769336" [ 1837.226396] env[62510]: _type = "Task" [ 1837.226396] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.240190] env[62510]: DEBUG oslo_vmware.api [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769336, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.305630] env[62510]: DEBUG nova.network.neutron [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1837.324520] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: c829d602-97bc-4ec8-9090-c63bed04ac79] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1837.483437] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1837.483817] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6de0ad65-a54b-4273-a1fa-af8c33d21eb7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.495461] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1837.495461] env[62510]: value = "task-1769337" [ 1837.495461] env[62510]: _type = "Task" [ 1837.495461] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.502652] env[62510]: DEBUG oslo_vmware.api [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769335, 'name': RemoveSnapshot_Task, 'duration_secs': 0.340782} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.503345] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Deleted Snapshot of the VM instance {{(pid=62510) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1837.503750] env[62510]: DEBUG nova.compute.manager [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1837.504611] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-128cf6ae-03ec-4169-8541-e0edc50534c5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.510759] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] VM already powered off {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1837.510942] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Volume detach. 
Driver type: vmdk {{(pid=62510) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1837.511148] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367443', 'volume_id': 'b844eb5c-7644-4bb5-900e-d0a16620fbe8', 'name': 'volume-b844eb5c-7644-4bb5-900e-d0a16620fbe8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bc474f8b-dd3b-4d7a-a8e0-fea5570b3091', 'attached_at': '', 'detached_at': '', 'volume_id': 'b844eb5c-7644-4bb5-900e-d0a16620fbe8', 'serial': 'b844eb5c-7644-4bb5-900e-d0a16620fbe8'} {{(pid=62510) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1837.511898] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2f65e88-9e12-40a5-9d74-b299ac8d660d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.536815] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a217e3c-62e6-4d4c-b76c-a049a61ed3bd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.545280] env[62510]: WARNING nova.virt.vmwareapi.driver [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 1837.545648] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1837.547652] env[62510]: DEBUG nova.network.neutron [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Updating instance_info_cache with network_info: [{"id": "53f991bc-ec2f-434b-8943-f8e6d891b608", "address": "fa:16:3e:0e:3a:9b", "network": {"id": "7d511bb5-50cd-4a86-94d2-efb9fbf27e48", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-70351339-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "69511bceaf9c432c8819574d05584f09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53f991bc-ec", "ovs_interfaceid": "53f991bc-ec2f-434b-8943-f8e6d891b608", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1837.550938] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f6fca3d-e889-43a8-a9c2-0ba68d975b87 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.559127] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1837.559127] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8458f964-fbe1-42d1-8780-8362bc3ccd44 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.605484] env[62510]: INFO nova.compute.manager [-] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Took 1.25 seconds to deallocate network for instance. [ 1837.730474] env[62510]: DEBUG oslo_concurrency.lockutils [None req-546d308d-e331-408c-99ff-364b34efab00 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Lock "1e3e2044-a072-454f-85ba-5cb0bc36b5fd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.691s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1837.738646] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d9a3169f-07d0-46bb-9449-1c81a3e49604 tempest-ServersV294TestFqdnHostnames-1929242530 tempest-ServersV294TestFqdnHostnames-1929242530-project-member] Lock "c8e69231-2786-47ac-9a44-c194088b8079" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.794s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1837.741709] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1837.741983] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1837.742227] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Deleting the datastore file [datastore1] bc474f8b-dd3b-4d7a-a8e0-fea5570b3091 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1837.746480] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-099a20cf-fd41-4978-a533-20135868e494 {{(pid=62510) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.748493] env[62510]: DEBUG oslo_vmware.api [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769336, 'name': CreateSnapshot_Task, 'duration_secs': 0.449855} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.748982] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Created Snapshot of the VM instance {{(pid=62510) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1837.750483] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b7187b0-d011-4eca-afb8-08e1d349e17c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.757348] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1837.757348] env[62510]: value = "task-1769339" [ 1837.757348] env[62510]: _type = "Task" [ 1837.757348] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.772201] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769339, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.833027] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 16b5d928-94fe-4fd5-9909-775c28d7edd2] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1838.022576] env[62510]: INFO nova.compute.manager [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Shelve offloading [ 1838.054626] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Releasing lock "refresh_cache-01204162-bf8e-46e0-bcf4-00df9ed7e7ce" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1838.054962] env[62510]: DEBUG nova.compute.manager [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Instance network_info: |[{"id": "53f991bc-ec2f-434b-8943-f8e6d891b608", "address": "fa:16:3e:0e:3a:9b", "network": {"id": "7d511bb5-50cd-4a86-94d2-efb9fbf27e48", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-70351339-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "69511bceaf9c432c8819574d05584f09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53f991bc-ec", "ovs_interfaceid": "53f991bc-ec2f-434b-8943-f8e6d891b608", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1838.055439] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0e:3a:9b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f6fb0104-186b-4288-b87e-634893f46f01', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '53f991bc-ec2f-434b-8943-f8e6d891b608', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1838.063445] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Creating folder: Project 
(69511bceaf9c432c8819574d05584f09). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1838.063728] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9446b2b0-5c7d-4b97-854d-93dabee4554b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.078121] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Created folder: Project (69511bceaf9c432c8819574d05584f09) in parent group-v367197. [ 1838.078426] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Creating folder: Instances. Parent ref: group-v367448. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1838.078863] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4898cf1c-3b7d-4c4a-a7bc-fe3fd7224034 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.090183] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Created folder: Instances in parent group-v367448. [ 1838.090430] env[62510]: DEBUG oslo.service.loopingcall [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1838.090633] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1838.090846] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-17f3d8cc-83fc-424f-8555-09eb793af08d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.112328] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0de3ec7b-ac20-44d7-92d2-c771538d1888 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1838.126024] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1838.126024] env[62510]: value = "task-1769342" [ 1838.126024] env[62510]: _type = "Task" [ 1838.126024] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1838.131994] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769342, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.198152] env[62510]: DEBUG nova.compute.manager [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1838.230010] env[62510]: DEBUG nova.virt.hardware [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1838.230279] env[62510]: DEBUG nova.virt.hardware [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1838.230478] env[62510]: DEBUG nova.virt.hardware [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1838.230885] env[62510]: DEBUG nova.virt.hardware [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1838.231059] env[62510]: DEBUG nova.virt.hardware [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1838.231214] env[62510]: DEBUG nova.virt.hardware [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1838.231429] env[62510]: DEBUG nova.virt.hardware [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1838.231590] env[62510]: DEBUG nova.virt.hardware [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 
tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1838.231753] env[62510]: DEBUG nova.virt.hardware [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1838.232055] env[62510]: DEBUG nova.virt.hardware [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1838.232099] env[62510]: DEBUG nova.virt.hardware [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1838.235732] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e647227a-02f3-4592-8897-94a2105dc62f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.246746] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3504a252-d818-414e-b41b-f8d2e303dd5b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.278487] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Creating linked-clone VM from snapshot {{(pid=62510) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1838.278487] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-6bab6ba0-e885-4e91-972b-fa4c94dac4e4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.288480] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769339, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.285714} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1838.289175] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1838.289175] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1838.289175] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1838.293370] env[62510]: DEBUG oslo_vmware.api [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 1838.293370] env[62510]: value = "task-1769343" [ 1838.293370] env[62510]: _type = "Task" [ 1838.293370] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1838.304935] env[62510]: DEBUG oslo_vmware.api [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769343, 'name': CloneVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.340550] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 31fe5643-dece-484f-92d6-7c7cafbd51e4] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1838.424842] env[62510]: DEBUG nova.compute.manager [req-bef10aa3-cf70-47e5-8260-a4f1a8a494d9 req-94f9a5ef-8941-407a-be5e-f3ecea88fcb0 service nova] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Received event network-changed-53f991bc-ec2f-434b-8943-f8e6d891b608 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1838.425610] env[62510]: DEBUG nova.compute.manager [req-bef10aa3-cf70-47e5-8260-a4f1a8a494d9 req-94f9a5ef-8941-407a-be5e-f3ecea88fcb0 service nova] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Refreshing instance network info cache due to event network-changed-53f991bc-ec2f-434b-8943-f8e6d891b608. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1838.425610] env[62510]: DEBUG oslo_concurrency.lockutils [req-bef10aa3-cf70-47e5-8260-a4f1a8a494d9 req-94f9a5ef-8941-407a-be5e-f3ecea88fcb0 service nova] Acquiring lock "refresh_cache-01204162-bf8e-46e0-bcf4-00df9ed7e7ce" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1838.425610] env[62510]: DEBUG oslo_concurrency.lockutils [req-bef10aa3-cf70-47e5-8260-a4f1a8a494d9 req-94f9a5ef-8941-407a-be5e-f3ecea88fcb0 service nova] Acquired lock "refresh_cache-01204162-bf8e-46e0-bcf4-00df9ed7e7ce" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1838.425960] env[62510]: DEBUG nova.network.neutron [req-bef10aa3-cf70-47e5-8260-a4f1a8a494d9 req-94f9a5ef-8941-407a-be5e-f3ecea88fcb0 service nova] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Refreshing network info cache for port 53f991bc-ec2f-434b-8943-f8e6d891b608 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1838.526828] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1838.528660] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2b9b3a38-54a5-4343-b503-c411d5e187a4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.543066] env[62510]: DEBUG oslo_vmware.api [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1838.543066] env[62510]: value = "task-1769344" [ 1838.543066] env[62510]: _type = "Task" [ 1838.543066] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1838.556213] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-833a67ba-247d-4596-8998-2a5f2458575f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.564099] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] VM already powered off {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1838.564348] env[62510]: DEBUG nova.compute.manager [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1838.565414] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67ce9ef9-b6f4-4b29-b50e-ee05aff6f547 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.571838] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cd9551e-21e6-4093-b45e-6f5108c844d2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.579052] env[62510]: DEBUG oslo_concurrency.lockutils [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "refresh_cache-313f7916-0737-4e44-ae2f-58301934bf06" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1838.579286] env[62510]: DEBUG oslo_concurrency.lockutils [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquired lock "refresh_cache-313f7916-0737-4e44-ae2f-58301934bf06" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1838.579409] env[62510]: DEBUG nova.network.neutron [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1838.610820] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0da11a35-4fab-4127-8a87-28ad0324a381 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.622228] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeff39bb-01cd-4f3a-9022-8a7c16c0a192 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.640567] env[62510]: DEBUG nova.compute.provider_tree [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Inventory has not changed in 
ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1838.644986] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769342, 'name': CreateVM_Task, 'duration_secs': 0.434974} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1838.645346] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1838.645995] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1838.646172] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1838.646494] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1838.646740] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f80228ae-1aa4-492f-8d9c-11487c476a6a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.654799] env[62510]: DEBUG oslo_vmware.api [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Waiting for the task: (returnval){ [ 1838.654799] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52a9c08d-340c-192f-ae80-a93396856795" [ 1838.654799] env[62510]: _type = "Task" [ 1838.654799] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1838.664453] env[62510]: DEBUG oslo_vmware.api [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52a9c08d-340c-192f-ae80-a93396856795, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.801420] env[62510]: INFO nova.virt.block_device [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Booting with volume b844eb5c-7644-4bb5-900e-d0a16620fbe8 at /dev/sdb [ 1838.809746] env[62510]: DEBUG oslo_vmware.api [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769343, 'name': CloneVM_Task} progress is 94%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.842020] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: d1c20183-ba24-4a11-ad82-bf240d581322] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1838.844859] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e5ecc67f-810f-4d7c-a1dc-0e71e4c40984 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.856748] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-176f2967-1c67-4dcd-a06b-048e506a4a79 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.892790] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-069dec15-7899-4261-ab73-7e4f37ccd04e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.905318] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3805696-4540-4f99-8ba6-124b5615b535 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.946798] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcd5ec14-ded0-41f1-8d77-41e7d96e56fe {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.956113] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb335578-38d0-4029-82ed-d975f1dcb15d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.972021] env[62510]: DEBUG nova.virt.block_device [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Updating existing volume attachment record: 13f58a82-3e10-4d5d-a082-d867ef1ff1b7 {{(pid=62510) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1839.010890] env[62510]: DEBUG nova.network.neutron [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Successfully updated port: b9d1d288-41c0-4355-a940-4e80836ad286 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1839.117077] env[62510]: DEBUG oslo_concurrency.lockutils [None 
req-4becc2e2-7669-4464-85f2-c4f31b985e15 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Acquiring lock "31772dc9-4f04-42df-9e3b-3200cc72c977" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1839.117453] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4becc2e2-7669-4464-85f2-c4f31b985e15 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Lock "31772dc9-4f04-42df-9e3b-3200cc72c977" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1839.117756] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4becc2e2-7669-4464-85f2-c4f31b985e15 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Acquiring lock "31772dc9-4f04-42df-9e3b-3200cc72c977-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1839.117962] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4becc2e2-7669-4464-85f2-c4f31b985e15 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Lock "31772dc9-4f04-42df-9e3b-3200cc72c977-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1839.118196] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4becc2e2-7669-4464-85f2-c4f31b985e15 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Lock "31772dc9-4f04-42df-9e3b-3200cc72c977-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1839.120524] env[62510]: INFO nova.compute.manager [None req-4becc2e2-7669-4464-85f2-c4f31b985e15 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 31772dc9-4f04-42df-9e3b-3200cc72c977] Terminating instance [ 1839.147190] env[62510]: DEBUG nova.scheduler.client.report [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1839.174189] env[62510]: DEBUG oslo_vmware.api [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52a9c08d-340c-192f-ae80-a93396856795,
'name': SearchDatastore_Task, 'duration_secs': 0.013043} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1839.174553] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1839.174833] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1839.175112] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1839.175279] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1839.175463] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1839.175750] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c7f4d0f5-01cc-4550-98f2-1090a371c957 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.188408] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1839.188626] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1839.189432] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f4965e0-c1f9-437a-a6b4-c89bf3401ea8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.192744] env[62510]: DEBUG nova.network.neutron [req-bef10aa3-cf70-47e5-8260-a4f1a8a494d9 req-94f9a5ef-8941-407a-be5e-f3ecea88fcb0 service nova] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Updated VIF entry in instance network info cache for port 53f991bc-ec2f-434b-8943-f8e6d891b608. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1839.192994] env[62510]: DEBUG nova.network.neutron [req-bef10aa3-cf70-47e5-8260-a4f1a8a494d9 req-94f9a5ef-8941-407a-be5e-f3ecea88fcb0 service nova] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Updating instance_info_cache with network_info: [{"id": "53f991bc-ec2f-434b-8943-f8e6d891b608", "address": "fa:16:3e:0e:3a:9b", "network": {"id": "7d511bb5-50cd-4a86-94d2-efb9fbf27e48", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-70351339-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "69511bceaf9c432c8819574d05584f09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53f991bc-ec", "ovs_interfaceid": "53f991bc-ec2f-434b-8943-f8e6d891b608", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1839.201723] env[62510]: DEBUG oslo_vmware.api [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Waiting for the task: (returnval){ [ 1839.201723] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52f05fe7-c0f4-ccf3-26cb-6cb2f6d4fdee" [ 1839.201723] env[62510]: _type = "Task" [ 1839.201723] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.213708] env[62510]: DEBUG oslo_vmware.api [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52f05fe7-c0f4-ccf3-26cb-6cb2f6d4fdee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.309190] env[62510]: DEBUG oslo_vmware.api [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769343, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.351495] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 0082eb97-26e9-4196-b8e3-63460d32dd19] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1839.516124] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "refresh_cache-e7e053be-cb88-4ae0-b157-3006211f77d9" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1839.516124] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquired lock "refresh_cache-e7e053be-cb88-4ae0-b157-3006211f77d9" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1839.517164] env[62510]: DEBUG nova.network.neutron [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1839.519325] env[62510]: DEBUG nova.network.neutron [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Updating instance_info_cache with network_info: [{"id": "b8e2da8e-86d0-4706-bdbb-39da14ef3e15", "address": "fa:16:3e:cd:c0:e6", "network": {"id": "de9186ec-ac4f-4ac0-8499-037f92e28197", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-164983974-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f878b652f01c48139bfc6996e5e32f5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "191a5351-07d5-4138-b855-206f48fc4375", "external-id": "nsx-vlan-transportzone-939", "segmentation_id": 939, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8e2da8e-86", "ovs_interfaceid": "b8e2da8e-86d0-4706-bdbb-39da14ef3e15", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1839.624159] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4becc2e2-7669-4464-85f2-c4f31b985e15 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Acquiring lock "refresh_cache-31772dc9-4f04-42df-9e3b-3200cc72c977" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1839.624159] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4becc2e2-7669-4464-85f2-c4f31b985e15 
tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Acquired lock "refresh_cache-31772dc9-4f04-42df-9e3b-3200cc72c977" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1839.628023] env[62510]: DEBUG nova.network.neutron [None req-4becc2e2-7669-4464-85f2-c4f31b985e15 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 31772dc9-4f04-42df-9e3b-3200cc72c977] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1839.652648] env[62510]: DEBUG oslo_concurrency.lockutils [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.469s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1839.653397] env[62510]: DEBUG nova.compute.manager [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1839.656210] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.399s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1839.658174] env[62510]: INFO nova.compute.claims [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1839.696171] env[62510]: DEBUG oslo_concurrency.lockutils [req-bef10aa3-cf70-47e5-8260-a4f1a8a494d9 req-94f9a5ef-8941-407a-be5e-f3ecea88fcb0 service nova] Releasing lock "refresh_cache-01204162-bf8e-46e0-bcf4-00df9ed7e7ce" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1839.696306] env[62510]: DEBUG nova.compute.manager [req-bef10aa3-cf70-47e5-8260-a4f1a8a494d9 req-94f9a5ef-8941-407a-be5e-f3ecea88fcb0 service nova] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Received event network-vif-deleted-01ac60bf-a53d-4e8f-a7ff-3329360878af {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1839.713044] env[62510]: DEBUG oslo_vmware.api [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52f05fe7-c0f4-ccf3-26cb-6cb2f6d4fdee, 'name': SearchDatastore_Task, 'duration_secs': 0.013381} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1839.713995] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94af31ec-8aa9-4e59-ace7-6520ac4c5992 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.721472] env[62510]: DEBUG oslo_concurrency.lockutils [None req-373bce13-1b98-4e63-a460-d446126bd0ed tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" by "nova.compute.manager.ComputeManager.reboot_instance.<locals>.do_reboot_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1839.721735] env[62510]: DEBUG oslo_concurrency.lockutils [None req-373bce13-1b98-4e63-a460-d446126bd0ed tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" acquired by "nova.compute.manager.ComputeManager.reboot_instance.<locals>.do_reboot_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1839.721973] env[62510]: INFO nova.compute.manager [None req-373bce13-1b98-4e63-a460-d446126bd0ed tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Rebooting instance [ 1839.723314] env[62510]: DEBUG oslo_vmware.api [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Waiting for the task: (returnval){ [ 1839.723314] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52a3e978-f4e5-4fb2-9450-f947b40833dc" [ 1839.723314] env[62510]: _type = "Task" [ 1839.723314] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.732959] env[62510]: DEBUG oslo_vmware.api [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52a3e978-f4e5-4fb2-9450-f947b40833dc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.805687] env[62510]: DEBUG oslo_vmware.api [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769343, 'name': CloneVM_Task, 'duration_secs': 1.375301} completed successfully.
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1839.806013] env[62510]: INFO nova.virt.vmwareapi.vmops [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Created linked-clone VM from snapshot [ 1839.806787] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7776f854-efac-482b-91ae-6bebbee4ffdd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.814693] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Uploading image 2c220807-cc2e-49b9-90e8-39effcef5fb1 {{(pid=62510) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1839.843609] env[62510]: DEBUG oslo_vmware.rw_handles [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1839.843609] env[62510]: value = "vm-367451" [ 1839.843609] env[62510]: _type = "VirtualMachine" [ 1839.843609] env[62510]: }. {{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1839.843934] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-9419f85d-8bc6-4301-9dd4-a47661ebfd41 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.852888] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 90869287-22bd-438c-8684-56f5d43e3ca8] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1839.855207] env[62510]: DEBUG oslo_vmware.rw_handles [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lease: (returnval){ [ 1839.855207] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52065680-339b-f4ed-e7bc-8c1a678878e5" [ 1839.855207] env[62510]: _type = "HttpNfcLease" [ 1839.855207] env[62510]: } obtained for exporting VM: (result){ [ 1839.855207] env[62510]: value = "vm-367451" [ 1839.855207] env[62510]: _type = "VirtualMachine" [ 1839.855207] env[62510]: }. {{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1839.855207] env[62510]: DEBUG oslo_vmware.api [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the lease: (returnval){ [ 1839.855207] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52065680-339b-f4ed-e7bc-8c1a678878e5" [ 1839.855207] env[62510]: _type = "HttpNfcLease" [ 1839.855207] env[62510]: } to be ready. 
{{(pid=62510) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1839.862344] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1839.862344] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52065680-339b-f4ed-e7bc-8c1a678878e5" [ 1839.862344] env[62510]: _type = "HttpNfcLease" [ 1839.862344] env[62510]: } is initializing. {{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1840.025310] env[62510]: DEBUG oslo_concurrency.lockutils [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Releasing lock "refresh_cache-313f7916-0737-4e44-ae2f-58301934bf06" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1840.080123] env[62510]: DEBUG nova.network.neutron [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1840.154904] env[62510]: DEBUG nova.network.neutron [None req-4becc2e2-7669-4464-85f2-c4f31b985e15 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 31772dc9-4f04-42df-9e3b-3200cc72c977] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1840.166698] env[62510]: DEBUG nova.compute.utils [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1840.173150] env[62510]: DEBUG nova.compute.manager [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1840.173150] env[62510]: DEBUG nova.network.neutron [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1840.237506] env[62510]: DEBUG nova.policy [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aa4c3fa8aa6141558d7eb16e0e726b96', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '68579c8354b4431e8ec51575cda77325', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1840.239723] env[62510]: DEBUG nova.network.neutron [None req-4becc2e2-7669-4464-85f2-c4f31b985e15 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 31772dc9-4f04-42df-9e3b-3200cc72c977] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1840.241907] env[62510]: DEBUG oslo_concurrency.lockutils [None req-373bce13-1b98-4e63-a460-d446126bd0ed tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "refresh_cache-f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1840.241907] env[62510]: DEBUG oslo_concurrency.lockutils [None req-373bce13-1b98-4e63-a460-d446126bd0ed tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquired lock "refresh_cache-f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1840.241907] env[62510]: DEBUG nova.network.neutron [None req-373bce13-1b98-4e63-a460-d446126bd0ed tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1840.256366] env[62510]: DEBUG oslo_vmware.api [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52a3e978-f4e5-4fb2-9450-f947b40833dc, 'name': SearchDatastore_Task, 'duration_secs': 0.011241} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1840.256726] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1840.257084] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 01204162-bf8e-46e0-bcf4-00df9ed7e7ce/01204162-bf8e-46e0-bcf4-00df9ed7e7ce.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1840.258502] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-410be55d-b09a-4864-a3d1-a23d2de9a1f1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.267620] env[62510]: DEBUG oslo_vmware.api [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Waiting for the task: (returnval){ [ 1840.267620] env[62510]: value = "task-1769346" [ 1840.267620] env[62510]: _type = "Task" [ 1840.267620] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1840.277469] env[62510]: DEBUG oslo_vmware.api [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Task: {'id': task-1769346, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.278904] env[62510]: DEBUG nova.network.neutron [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Updating instance_info_cache with network_info: [{"id": "b9d1d288-41c0-4355-a940-4e80836ad286", "address": "fa:16:3e:4e:ba:9d", "network": {"id": "22bd7136-e6e5-445f-8cd0-6cfe0341410c", "bridge": "br-int", "label": "tempest-ServersTestJSON-2034430291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "94a46473611d4b22be7c66c909d1b348", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9d1d288-41", "ovs_interfaceid": "b9d1d288-41c0-4355-a940-4e80836ad286", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1840.356839] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 4d622ed5-5f6f-46ca-bc4a-efb32f452cb7] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1840.373550] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1840.373550] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52065680-339b-f4ed-e7bc-8c1a678878e5" [ 1840.373550] env[62510]: _type = "HttpNfcLease" [ 1840.373550] env[62510]: } is ready. {{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1840.373896] env[62510]: DEBUG oslo_vmware.rw_handles [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1840.373896] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52065680-339b-f4ed-e7bc-8c1a678878e5" [ 1840.373896] env[62510]: _type = "HttpNfcLease" [ 1840.373896] env[62510]: }. {{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1840.374787] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bac5d97-6d27-4bab-9f6e-cb802d2e759e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.387084] env[62510]: DEBUG oslo_vmware.rw_handles [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524dfec6-6a71-4196-fed0-3969596804d7/disk-0.vmdk from lease info. 
{{(pid=62510) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1840.387347] env[62510]: DEBUG oslo_vmware.rw_handles [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524dfec6-6a71-4196-fed0-3969596804d7/disk-0.vmdk for reading. {{(pid=62510) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1840.462672] env[62510]: DEBUG nova.compute.manager [req-7cc79f6a-1fb1-4dd1-a414-9b168a377ca1 req-e943337a-8377-47d2-aa0f-8323650e10ff service nova] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Received event network-vif-unplugged-b8e2da8e-86d0-4706-bdbb-39da14ef3e15 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1840.462889] env[62510]: DEBUG oslo_concurrency.lockutils [req-7cc79f6a-1fb1-4dd1-a414-9b168a377ca1 req-e943337a-8377-47d2-aa0f-8323650e10ff service nova] Acquiring lock "313f7916-0737-4e44-ae2f-58301934bf06-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1840.463110] env[62510]: DEBUG oslo_concurrency.lockutils [req-7cc79f6a-1fb1-4dd1-a414-9b168a377ca1 req-e943337a-8377-47d2-aa0f-8323650e10ff service nova] Lock "313f7916-0737-4e44-ae2f-58301934bf06-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1840.463359] env[62510]: DEBUG oslo_concurrency.lockutils [req-7cc79f6a-1fb1-4dd1-a414-9b168a377ca1 req-e943337a-8377-47d2-aa0f-8323650e10ff service nova] Lock "313f7916-0737-4e44-ae2f-58301934bf06-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1840.464215] env[62510]: DEBUG nova.compute.manager [req-7cc79f6a-1fb1-4dd1-a414-9b168a377ca1 req-e943337a-8377-47d2-aa0f-8323650e10ff service nova] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] No waiting events found dispatching network-vif-unplugged-b8e2da8e-86d0-4706-bdbb-39da14ef3e15 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1840.464215] env[62510]: WARNING nova.compute.manager [req-7cc79f6a-1fb1-4dd1-a414-9b168a377ca1 req-e943337a-8377-47d2-aa0f-8323650e10ff service nova] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Received unexpected event network-vif-unplugged-b8e2da8e-86d0-4706-bdbb-39da14ef3e15 for instance with vm_state shelved and task_state shelving_offloading.
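The oslo_concurrency.lockutils entries above come from the two standard oslo.concurrency locking forms: the context-manager calls logged from lock() (lockutils.py:310/313/331) and the decorator wrapper logged from inner() (lockutils.py:402/407/421). A minimal sketch of both, with placeholder lock names rather than Nova's real instance/datastore names and not Nova's actual wrappers:

```python
# Illustrative sketch only; assumes oslo.concurrency is installed.
from oslo_concurrency import lockutils

# Context-manager form: emits the 'Acquiring lock' / 'Acquired lock' /
# 'Releasing lock' DEBUG lines attributed to lock() in the log.
def refresh_image_cache(cache_key):
    with lockutils.lock(cache_key):
        pass  # datastore search/copy work would run while the lock is held

# Decorator form: emits the 'acquired by "..." :: waited' and
# '"released" by "..." :: held' lines attributed to inner() in the log.
@lockutils.synchronized("instance-uuid-events")
def _pop_event():
    pass  # per-instance "-events" bookkeeping runs under this lock
```

Nova layers its own helpers on top of these primitives, so the sketch mirrors only the logging behaviour visible here, not the real call sites.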
[ 1840.509832] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d16c8863-839d-44ff-9943-a22303d07bb5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.564116] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1840.565261] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e56157f-6236-476a-9931-f7e066ad1d33 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.579291] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1840.580043] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-de712fa7-0da1-4564-9e55-1e12d2d53f86 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.671507] env[62510]: DEBUG nova.compute.manager [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Start building block device mappings for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1840.675418] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1840.675418] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1840.675418] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Deleting the datastore file [datastore1] 313f7916-0737-4e44-ae2f-58301934bf06 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1840.679702] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c566bd74-2ea4-45fd-8878-a66463ecd963 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.692091] env[62510]: DEBUG oslo_vmware.api [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1840.692091] env[62510]: value = "task-1769348" [ 1840.692091] env[62510]: _type = "Task" [ 1840.692091] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1840.711317] env[62510]: DEBUG oslo_vmware.api [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769348, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.744663] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4becc2e2-7669-4464-85f2-c4f31b985e15 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Releasing lock "refresh_cache-31772dc9-4f04-42df-9e3b-3200cc72c977" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1840.745348] env[62510]: DEBUG nova.compute.manager [None req-4becc2e2-7669-4464-85f2-c4f31b985e15 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 31772dc9-4f04-42df-9e3b-3200cc72c977] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1840.745611] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4becc2e2-7669-4464-85f2-c4f31b985e15 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 31772dc9-4f04-42df-9e3b-3200cc72c977] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1840.749204] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1d5e5dc-3c96-4bfb-9199-fcc906bb9802 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.766525] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4becc2e2-7669-4464-85f2-c4f31b985e15 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 31772dc9-4f04-42df-9e3b-3200cc72c977] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1840.769026] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c3c4dfc1-eca7-45d2-823c-8531707ca1f4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.783657] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Releasing lock "refresh_cache-e7e053be-cb88-4ae0-b157-3006211f77d9" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1840.784147] env[62510]: DEBUG nova.compute.manager [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Instance network_info: |[{"id": "b9d1d288-41c0-4355-a940-4e80836ad286", "address": "fa:16:3e:4e:ba:9d", "network": {"id": "22bd7136-e6e5-445f-8cd0-6cfe0341410c", "bridge": "br-int", "label": "tempest-ServersTestJSON-2034430291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "94a46473611d4b22be7c66c909d1b348", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9d1d288-41", "ovs_interfaceid": "b9d1d288-41c0-4355-a940-4e80836ad286", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1840.784498] env[62510]: DEBUG oslo_vmware.api [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Task: {'id': task-1769346, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.514399} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1840.786441] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4e:ba:9d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89ef02af-c508-432f-ae29-3a219701d584', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b9d1d288-41c0-4355-a940-4e80836ad286', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1840.797943] env[62510]: DEBUG oslo.service.loopingcall [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1840.797943] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 01204162-bf8e-46e0-bcf4-00df9ed7e7ce/01204162-bf8e-46e0-bcf4-00df9ed7e7ce.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1840.797943] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1840.797943] env[62510]: DEBUG oslo_vmware.api [None req-4becc2e2-7669-4464-85f2-c4f31b985e15 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Waiting for the task: (returnval){ [ 1840.797943] env[62510]: value = "task-1769349" [ 1840.797943] env[62510]: _type = "Task" [ 1840.797943] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1840.800342] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1840.800886] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1fdd3158-72b4-469f-82c7-2b90da34aa65 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.807404] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6c055e70-213d-46bf-87af-58755e3266d7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.827510] env[62510]: DEBUG nova.compute.manager [req-89f7d847-c7c3-4d38-b538-bd5db647ce8d req-75213e37-66cd-4c26-a9a5-53da96db5334 service nova] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Received event network-vif-plugged-b9d1d288-41c0-4355-a940-4e80836ad286 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1840.827775] env[62510]: DEBUG oslo_concurrency.lockutils [req-89f7d847-c7c3-4d38-b538-bd5db647ce8d req-75213e37-66cd-4c26-a9a5-53da96db5334 service nova] Acquiring lock "e7e053be-cb88-4ae0-b157-3006211f77d9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1840.830331] env[62510]: DEBUG oslo_concurrency.lockutils [req-89f7d847-c7c3-4d38-b538-bd5db647ce8d req-75213e37-66cd-4c26-a9a5-53da96db5334 service nova] Lock "e7e053be-cb88-4ae0-b157-3006211f77d9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.002s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1840.830331] env[62510]: DEBUG oslo_concurrency.lockutils [req-89f7d847-c7c3-4d38-b538-bd5db647ce8d req-75213e37-66cd-4c26-a9a5-53da96db5334 service nova] Lock "e7e053be-cb88-4ae0-b157-3006211f77d9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1840.830331] env[62510]: DEBUG nova.compute.manager [req-89f7d847-c7c3-4d38-b538-bd5db647ce8d req-75213e37-66cd-4c26-a9a5-53da96db5334 service nova] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] No waiting events found dispatching network-vif-plugged-b9d1d288-41c0-4355-a940-4e80836ad286 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1840.830331] env[62510]: WARNING nova.compute.manager [req-89f7d847-c7c3-4d38-b538-bd5db647ce8d req-75213e37-66cd-4c26-a9a5-53da96db5334 service nova] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Received unexpected event network-vif-plugged-b9d1d288-41c0-4355-a940-4e80836ad286 for instance with vm_state building and task_state spawning.
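The 'Waiting for the task' / 'progress is N%' / 'completed successfully' entries around this point are oslo.vmware's task-polling loop (wait_for_task and _poll_task). A minimal sketch of that flow, assuming a configured VMwareAPISession; the function name, managed-object references, and credentials are placeholders, not values from this log:

```python
# Sketch of the oslo.vmware task flow behind the wait_for_task/_poll_task
# lines above; refs and credentials are placeholders.
from oslo_vmware import api


def clone_and_wait(session, vm_ref, folder_ref, clone_spec, name):
    # invoke_api() issues the SOAP call (here CloneVM_Task) and returns a
    # task managed-object reference.
    task = session.invoke_api(session.vim, 'CloneVM_Task', vm_ref,
                              folder=folder_ref, name=name, spec=clone_spec)
    # wait_for_task() polls the task reference; each poll is what _poll_task
    # logs as "progress is N%" until "completed successfully", or it raises
    # on task error.
    return session.wait_for_task(task)


# A session like the one created at service start-up would be built roughly as:
# session = api.VMwareAPISession('vcenter.example.test', 'user', 'secret',
#                                api_retry_count=10, task_poll_interval=0.5)
```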
[ 1840.830331] env[62510]: DEBUG nova.compute.manager [req-89f7d847-c7c3-4d38-b538-bd5db647ce8d req-75213e37-66cd-4c26-a9a5-53da96db5334 service nova] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Received event network-changed-b9d1d288-41c0-4355-a940-4e80836ad286 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1840.830535] env[62510]: DEBUG nova.compute.manager [req-89f7d847-c7c3-4d38-b538-bd5db647ce8d req-75213e37-66cd-4c26-a9a5-53da96db5334 service nova] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Refreshing instance network info cache due to event network-changed-b9d1d288-41c0-4355-a940-4e80836ad286. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1840.830657] env[62510]: DEBUG oslo_concurrency.lockutils [req-89f7d847-c7c3-4d38-b538-bd5db647ce8d req-75213e37-66cd-4c26-a9a5-53da96db5334 service nova] Acquiring lock "refresh_cache-e7e053be-cb88-4ae0-b157-3006211f77d9" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1840.830817] env[62510]: DEBUG oslo_concurrency.lockutils [req-89f7d847-c7c3-4d38-b538-bd5db647ce8d req-75213e37-66cd-4c26-a9a5-53da96db5334 service nova] Acquired lock "refresh_cache-e7e053be-cb88-4ae0-b157-3006211f77d9" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1840.830989] env[62510]: DEBUG nova.network.neutron [req-89f7d847-c7c3-4d38-b538-bd5db647ce8d req-75213e37-66cd-4c26-a9a5-53da96db5334 service nova] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Refreshing network info cache for port b9d1d288-41c0-4355-a940-4e80836ad286 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1840.844051] env[62510]: DEBUG oslo_vmware.api [None req-4becc2e2-7669-4464-85f2-c4f31b985e15 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769349, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.847790] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1840.847790] env[62510]: value = "task-1769351" [ 1840.847790] env[62510]: _type = "Task" [ 1840.847790] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1840.848638] env[62510]: DEBUG oslo_vmware.api [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Waiting for the task: (returnval){ [ 1840.848638] env[62510]: value = "task-1769350" [ 1840.848638] env[62510]: _type = "Task" [ 1840.848638] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1840.868456] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 3df19233-2448-4030-ae1d-a4f98ccffba9] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1840.881035] env[62510]: DEBUG oslo_vmware.api [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Task: {'id': task-1769350, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.881035] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769351, 'name': CreateVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.005182] env[62510]: DEBUG nova.network.neutron [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Successfully created port: 965c7e31-fbcc-4660-900b-d657b5aa8abb {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1841.084391] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc1e821a-c564-4676-9f1a-af5a0eb73bb4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.093450] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac0f01c2-56b5-437c-b2c9-8bc091fc7f83 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.135046] env[62510]: DEBUG nova.virt.hardware [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1841.135730] env[62510]: DEBUG nova.virt.hardware [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1841.135730] env[62510]: DEBUG nova.virt.hardware [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1841.135903] env[62510]: DEBUG nova.virt.hardware [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1841.136079] env[62510]: DEBUG nova.virt.hardware [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1841.136295] env[62510]: DEBUG nova.virt.hardware [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1841.136516] env[62510]: DEBUG nova.virt.hardware [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1841.136681] env[62510]: DEBUG nova.virt.hardware [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1841.136846] env[62510]: DEBUG nova.virt.hardware [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1841.137015] env[62510]: DEBUG nova.virt.hardware [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1841.137204] env[62510]: DEBUG nova.virt.hardware [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1841.140457] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-391f84a4-0dcd-4c81-af13-8d18e40e37bc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.143613] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3af5472-7a92-4270-ad3c-41362f6ede3b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.155495] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddfcec59-bc4d-4d98-aa43-d44277c69324 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.161054] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f84bdbf-943c-46ba-8407-14a65f1202e4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.181420] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:13:17:23', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9630cae2-7dd9-42b7-8b53-91ab254af243', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4568ba9b-dd3d-4796-bcfc-7bf80545a66b', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1841.189440] env[62510]: DEBUG oslo.service.loopingcall [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1841.198431] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1841.199074] env[62510]: DEBUG nova.compute.provider_tree [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1841.200618] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-21c4bfc6-3f9a-4d95-8864-21e12fc680a1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.222324] env[62510]: DEBUG nova.scheduler.client.report [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1841.232627] env[62510]: DEBUG oslo_vmware.api [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769348, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.219968} completed successfully. 
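The inventory payload the resource tracker reports above (VCPU, MEMORY_MB and DISK_GB, each with total, reserved, allocation_ratio, min_unit/max_unit and step_size) is what Placement uses when answering allocation requests. As a rough illustration only, the usable capacity of each resource class is (total - reserved) * allocation_ratio; the snippet below recomputes that for the logged numbers and is an approximation of Placement's accounting, not its code.

INVENTORY = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0,
             'min_unit': 1, 'max_unit': 16, 'step_size': 1},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0,
                  'min_unit': 1, 'max_unit': 65530, 'step_size': 1},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0,
                'min_unit': 1, 'max_unit': 165, 'step_size': 1},
}

def effective_capacity(inventory):
    # Capacity available for allocation: (total - reserved) * allocation_ratio.
    return {rc: int((v['total'] - v['reserved']) * v['allocation_ratio'])
            for rc, v in inventory.items()}

print(effective_capacity(INVENTORY))
# {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}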
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1841.234267] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1841.234807] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1841.234807] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1841.237485] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1841.237485] env[62510]: value = "task-1769352" [ 1841.237485] env[62510]: _type = "Task" [ 1841.237485] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1841.255179] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769352, 'name': CreateVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.269304] env[62510]: INFO nova.scheduler.client.report [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Deleted allocations for instance 313f7916-0737-4e44-ae2f-58301934bf06 [ 1841.315070] env[62510]: DEBUG oslo_vmware.api [None req-4becc2e2-7669-4464-85f2-c4f31b985e15 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769349, 'name': PowerOffVM_Task, 'duration_secs': 0.222817} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1841.315423] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4becc2e2-7669-4464-85f2-c4f31b985e15 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 31772dc9-4f04-42df-9e3b-3200cc72c977] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1841.315590] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4becc2e2-7669-4464-85f2-c4f31b985e15 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 31772dc9-4f04-42df-9e3b-3200cc72c977] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1841.316217] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-43b83508-1ef0-4eb4-bbf3-072a010a770b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.349587] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4becc2e2-7669-4464-85f2-c4f31b985e15 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 31772dc9-4f04-42df-9e3b-3200cc72c977] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1841.349587] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4becc2e2-7669-4464-85f2-c4f31b985e15 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 31772dc9-4f04-42df-9e3b-3200cc72c977] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1841.349587] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-4becc2e2-7669-4464-85f2-c4f31b985e15 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Deleting the datastore file [datastore1] 31772dc9-4f04-42df-9e3b-3200cc72c977 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1841.349587] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a1040fcc-6a82-4aa1-86a2-d462bba86d07 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.363060] env[62510]: DEBUG oslo_vmware.api [None req-4becc2e2-7669-4464-85f2-c4f31b985e15 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Waiting for the task: (returnval){ [ 1841.363060] env[62510]: value = "task-1769354" [ 1841.363060] env[62510]: _type = "Task" [ 1841.363060] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1841.371789] env[62510]: DEBUG oslo_vmware.api [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Task: {'id': task-1769350, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082702} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1841.372360] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769351, 'name': CreateVM_Task, 'duration_secs': 0.404912} completed successfully. 
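The teardown of instance 31772dc9 in the entries above follows the driver's fixed ordering: power the VM off, unregister it from vCenter, then delete its datastore directory. The function below compresses that ordering into a sketch; session.invoke and session.wait_for_task are hypothetical stand-ins, and the real logic lives in nova.virt.vmwareapi.vmops._destroy_instance.

def destroy_vm(session, vm_ref, instance_dir):
    """Illustrative ordering only, not the driver's actual code."""
    # 1. Power the VM off (PowerOffVM_Task is asynchronous, so wait for it).
    session.wait_for_task(session.invoke('PowerOffVM_Task', vm_ref))
    # 2. Unregister the VM; UnregisterVM is synchronous, no task to poll.
    session.invoke('UnregisterVM', vm_ref)
    # 3. Remove the instance directory, e.g. "[datastore1] <instance uuid>".
    session.wait_for_task(session.invoke('DeleteDatastoreFile_Task', instance_dir))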
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1841.378322] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1841.378618] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1841.381602] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14ebd660-adb2-4a61-a183-4b305a35a71b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.384741] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1841.386027] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1841.386027] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1841.386141] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 0d27da5c-20f3-4df1-86d2-036c904fd657] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1841.388759] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9e7baef-5114-4541-b885-45591ff5ffc8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.395862] env[62510]: DEBUG oslo_vmware.api [None req-4becc2e2-7669-4464-85f2-c4f31b985e15 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769354, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.419848] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] 01204162-bf8e-46e0-bcf4-00df9ed7e7ce/01204162-bf8e-46e0-bcf4-00df9ed7e7ce.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1841.421896] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ba34dbf8-cd57-448c-b2de-7e1730d448de {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.437369] env[62510]: DEBUG oslo_vmware.api [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1841.437369] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]522307cc-1293-81ee-dff8-faee6abb7ba0" [ 1841.437369] env[62510]: _type = "Task" [ 1841.437369] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1841.448087] env[62510]: DEBUG oslo_vmware.api [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Waiting for the task: (returnval){ [ 1841.448087] env[62510]: value = "task-1769355" [ 1841.448087] env[62510]: _type = "Task" [ 1841.448087] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1841.455700] env[62510]: DEBUG oslo_vmware.api [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]522307cc-1293-81ee-dff8-faee6abb7ba0, 'name': SearchDatastore_Task, 'duration_secs': 0.012983} completed successfully. 
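The devstack-image-cache_base entries around here show the serialization pattern the driver uses for the image cache: take a named lock for the cached image path, run a SearchDatastore_Task to see whether the VMDK is already cached, and only then decide whether to reuse or fetch it. A minimal sketch of that pattern using oslo.concurrency's lockutils.lock context manager; image_cached and fetch_image are hypothetical callables, and the real flow is nova.virt.vmwareapi.vmops._fetch_image_if_missing.

from oslo_concurrency import lockutils

def ensure_cached_image(image_id, image_cached, fetch_image):
    cache_path = f"[datastore1] devstack-image-cache_base/{image_id}"
    # Same lock name as in the log, so concurrent spawns of the same image
    # cannot race while the cache entry is checked or populated.
    with lockutils.lock(cache_path):
        if not image_cached(cache_path):       # the SearchDatastore_Task step
            fetch_image(image_id, cache_path)  # download/convert into the cache
    return cache_path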
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1841.457517] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1841.457517] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1841.457517] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1841.457517] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1841.457683] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1841.458474] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1e067f32-6ef1-43d3-88e3-a67aa07a7971 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.464141] env[62510]: DEBUG oslo_vmware.api [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Task: {'id': task-1769355, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.473752] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1841.473752] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1841.474201] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-827e788c-0b35-4486-a0c2-786e8ff44076 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.481800] env[62510]: DEBUG oslo_vmware.api [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1841.481800] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5290c3c8-d3a1-05ec-1c05-09e7a3f3ee04" [ 1841.481800] env[62510]: _type = "Task" [ 1841.481800] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1841.493142] env[62510]: DEBUG oslo_vmware.api [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5290c3c8-d3a1-05ec-1c05-09e7a3f3ee04, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.500059] env[62510]: DEBUG nova.network.neutron [None req-373bce13-1b98-4e63-a460-d446126bd0ed tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Updating instance_info_cache with network_info: [{"id": "5550cd5d-e9b6-4414-a8e4-e7c6875d2399", "address": "fa:16:3e:c1:31:c9", "network": {"id": "e49618de-aacc-4b42-8a2e-7e2dc945a3b1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-883053645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b13a257970e4a9a9f9cfecaaf37d9da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5550cd5d-e9", "ovs_interfaceid": "5550cd5d-e9b6-4414-a8e4-e7c6875d2399", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1841.644325] env[62510]: DEBUG nova.network.neutron [req-89f7d847-c7c3-4d38-b538-bd5db647ce8d req-75213e37-66cd-4c26-a9a5-53da96db5334 service nova] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Updated VIF entry in instance network info cache for port b9d1d288-41c0-4355-a940-4e80836ad286. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1841.644769] env[62510]: DEBUG nova.network.neutron [req-89f7d847-c7c3-4d38-b538-bd5db647ce8d req-75213e37-66cd-4c26-a9a5-53da96db5334 service nova] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Updating instance_info_cache with network_info: [{"id": "b9d1d288-41c0-4355-a940-4e80836ad286", "address": "fa:16:3e:4e:ba:9d", "network": {"id": "22bd7136-e6e5-445f-8cd0-6cfe0341410c", "bridge": "br-int", "label": "tempest-ServersTestJSON-2034430291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "94a46473611d4b22be7c66c909d1b348", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9d1d288-41", "ovs_interfaceid": "b9d1d288-41c0-4355-a940-4e80836ad286", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1841.700223] env[62510]: DEBUG nova.compute.manager [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1841.728814] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.072s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.729482] env[62510]: DEBUG nova.compute.manager [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Start building networks asynchronously for instance. 
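The instance_info_cache entries above carry the full network_info model for a port: the outer dict holds the port id, MAC address and binding details, while network['subnets'][n]['ips'] holds the fixed IPs with any floating IPs nested under them. A small stdlib helper to pull addresses out of one such entry; the vif dict below mirrors the logged structure for port 5550cd5d, trimmed to the fields the helper uses.

def extract_addresses(vif):
    """Return (fixed_ips, floating_ips) from one network_info entry."""
    fixed, floating = [], []
    for subnet in vif.get('network', {}).get('subnets', []):
        for ip in subnet.get('ips', []):
            fixed.append(ip['address'])
            floating.extend(f['address'] for f in ip.get('floating_ips', []))
    return fixed, floating

vif = {
    'id': '5550cd5d-e9b6-4414-a8e4-e7c6875d2399',
    'address': 'fa:16:3e:c1:31:c9',
    'network': {
        'subnets': [{
            'cidr': '192.168.128.0/28',
            'ips': [{
                'address': '192.168.128.11',
                'floating_ips': [{'address': '10.180.180.246'}],
            }],
        }],
    },
}
print(extract_addresses(vif))  # (['192.168.128.11'], ['10.180.180.246'])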
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1841.732718] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.838s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1841.737026] env[62510]: INFO nova.compute.claims [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1841.752302] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769352, 'name': CreateVM_Task, 'duration_secs': 0.473189} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1841.752462] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1841.753158] env[62510]: DEBUG oslo_concurrency.lockutils [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1841.753320] env[62510]: DEBUG oslo_concurrency.lockutils [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1841.753798] env[62510]: DEBUG oslo_concurrency.lockutils [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1841.754085] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e78f1557-bd00-49d4-be9d-9a0f004c7a9f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.759968] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1841.759968] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52ae5a74-bab6-ca19-4354-af248d5a4b1e" [ 1841.759968] env[62510]: _type = "Task" [ 1841.759968] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1841.769374] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52ae5a74-bab6-ca19-4354-af248d5a4b1e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.775617] env[62510]: DEBUG oslo_concurrency.lockutils [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1841.786270] env[62510]: DEBUG nova.virt.hardware [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1841.786807] env[62510]: DEBUG nova.virt.hardware [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1841.786807] env[62510]: DEBUG nova.virt.hardware [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1841.787234] env[62510]: DEBUG nova.virt.hardware [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1841.787278] env[62510]: DEBUG nova.virt.hardware [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1841.787991] env[62510]: DEBUG nova.virt.hardware [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 
tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1841.787991] env[62510]: DEBUG nova.virt.hardware [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1841.787991] env[62510]: DEBUG nova.virt.hardware [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1841.788148] env[62510]: DEBUG nova.virt.hardware [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1841.788581] env[62510]: DEBUG nova.virt.hardware [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1841.788890] env[62510]: DEBUG nova.virt.hardware [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1841.791667] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c566b232-c240-4774-9f51-9239c821875d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.802343] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68a01ff5-911d-4b50-a6b6-81874462d687 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.880982] env[62510]: DEBUG oslo_vmware.api [None req-4becc2e2-7669-4464-85f2-c4f31b985e15 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Task: {'id': task-1769354, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136071} completed successfully. 
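Both CPU-topology negotiations above (for bc474f8b and 2e24b76d) resolve the same way: with no flavor or image limits set, the maxima default to 65536 sockets, cores and threads, and for a 1-vCPU m1.nano the only possible topology is 1 socket x 1 core x 1 thread. The function below is a simplified enumeration of that idea, not Nova's actual _get_possible_cpu_topologies, which also handles preferences and NUMA constraints.

from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate (sockets, cores, threads) combinations whose product is vcpus."""
    divisors = [d for d in range(1, vcpus + 1) if vcpus % d == 0]
    topologies = []
    for sockets, cores in product(divisors, divisors):
        if vcpus % (sockets * cores):
            continue
        threads = vcpus // (sockets * cores)
        if sockets <= max_sockets and cores <= max_cores and threads <= max_threads:
            topologies.append((sockets, cores, threads))
    return topologies

print(possible_topologies(1))  # [(1, 1, 1)]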
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1841.881271] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-4becc2e2-7669-4464-85f2-c4f31b985e15 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1841.881473] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4becc2e2-7669-4464-85f2-c4f31b985e15 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 31772dc9-4f04-42df-9e3b-3200cc72c977] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1841.881667] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4becc2e2-7669-4464-85f2-c4f31b985e15 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 31772dc9-4f04-42df-9e3b-3200cc72c977] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1841.881843] env[62510]: INFO nova.compute.manager [None req-4becc2e2-7669-4464-85f2-c4f31b985e15 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] [instance: 31772dc9-4f04-42df-9e3b-3200cc72c977] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1841.882171] env[62510]: DEBUG oslo.service.loopingcall [None req-4becc2e2-7669-4464-85f2-c4f31b985e15 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1841.882384] env[62510]: DEBUG nova.compute.manager [-] [instance: 31772dc9-4f04-42df-9e3b-3200cc72c977] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1841.882489] env[62510]: DEBUG nova.network.neutron [-] [instance: 31772dc9-4f04-42df-9e3b-3200cc72c977] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1841.896045] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: d3e25d50-f315-439b-9e9f-8e454a0631d4] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1841.900572] env[62510]: DEBUG nova.network.neutron [-] [instance: 31772dc9-4f04-42df-9e3b-3200cc72c977] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1841.962981] env[62510]: DEBUG oslo_vmware.api [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Task: {'id': task-1769355, 'name': ReconfigVM_Task, 'duration_secs': 0.399342} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1841.965544] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Reconfigured VM instance instance-0000005e to attach disk [datastore1] 01204162-bf8e-46e0-bcf4-00df9ed7e7ce/01204162-bf8e-46e0-bcf4-00df9ed7e7ce.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1841.966230] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-abd0fef4-d54d-40b2-93da-2f6c94e37d47 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.978261] env[62510]: DEBUG oslo_vmware.api [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Waiting for the task: (returnval){ [ 1841.978261] env[62510]: value = "task-1769356" [ 1841.978261] env[62510]: _type = "Task" [ 1841.978261] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1841.989329] env[62510]: DEBUG oslo_vmware.api [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Task: {'id': task-1769356, 'name': Rename_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.995947] env[62510]: DEBUG oslo_vmware.api [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5290c3c8-d3a1-05ec-1c05-09e7a3f3ee04, 'name': SearchDatastore_Task, 'duration_secs': 0.014231} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1841.996761] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93920eee-3720-4fc8-8c27-6ae0485edf80 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.004074] env[62510]: DEBUG oslo_concurrency.lockutils [None req-373bce13-1b98-4e63-a460-d446126bd0ed tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Releasing lock "refresh_cache-f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1842.009589] env[62510]: DEBUG oslo_vmware.api [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1842.009589] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52907bd8-7954-4832-3b2d-cda860bac92d" [ 1842.009589] env[62510]: _type = "Task" [ 1842.009589] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1842.017990] env[62510]: DEBUG oslo_vmware.api [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52907bd8-7954-4832-3b2d-cda860bac92d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.148099] env[62510]: DEBUG oslo_concurrency.lockutils [req-89f7d847-c7c3-4d38-b538-bd5db647ce8d req-75213e37-66cd-4c26-a9a5-53da96db5334 service nova] Releasing lock "refresh_cache-e7e053be-cb88-4ae0-b157-3006211f77d9" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1842.241544] env[62510]: DEBUG nova.compute.utils [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1842.243032] env[62510]: DEBUG nova.compute.manager [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1842.243215] env[62510]: DEBUG nova.network.neutron [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1842.271649] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52ae5a74-bab6-ca19-4354-af248d5a4b1e, 'name': SearchDatastore_Task, 'duration_secs': 0.012503} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1842.272038] env[62510]: DEBUG oslo_concurrency.lockutils [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1842.272291] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1842.272502] env[62510]: DEBUG oslo_concurrency.lockutils [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1842.294078] env[62510]: DEBUG nova.policy [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8bab2df0cdfb4853879515120e93ce25', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'de0d125bba6242d3b9614402098efc1f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1842.325886] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f472b493-89ac-4a3a-8cb7-b7709651763a tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "313f7916-0737-4e44-ae2f-58301934bf06" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1842.399126] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: b5ff2a10-3c76-469a-86e0-ed3b135bca37] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1842.402690] env[62510]: DEBUG nova.network.neutron [-] [instance: 31772dc9-4f04-42df-9e3b-3200cc72c977] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1842.489645] env[62510]: DEBUG oslo_vmware.api [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Task: {'id': task-1769356, 'name': Rename_Task, 'duration_secs': 0.165678} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1842.490139] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1842.490580] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f975b4dd-9cac-4a0c-a926-b38aae8f082b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.500190] env[62510]: DEBUG oslo_vmware.api [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Waiting for the task: (returnval){ [ 1842.500190] env[62510]: value = "task-1769357" [ 1842.500190] env[62510]: _type = "Task" [ 1842.500190] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1842.515337] env[62510]: DEBUG oslo_vmware.api [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Task: {'id': task-1769357, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.516366] env[62510]: DEBUG nova.compute.manager [None req-373bce13-1b98-4e63-a460-d446126bd0ed tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1842.521767] env[62510]: DEBUG nova.compute.manager [req-e8389a04-75ba-4c4a-afa1-c7e1f8261652 req-b52ce1cc-fdd1-4d21-830a-c063ffd628ed service nova] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Received event network-changed-b8e2da8e-86d0-4706-bdbb-39da14ef3e15 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1842.522140] env[62510]: DEBUG nova.compute.manager [req-e8389a04-75ba-4c4a-afa1-c7e1f8261652 req-b52ce1cc-fdd1-4d21-830a-c063ffd628ed service nova] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Refreshing instance network info cache due to event network-changed-b8e2da8e-86d0-4706-bdbb-39da14ef3e15. 
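The two network-changed events in this window (ports b9d1d288 and b8e2da8e) are handled identically: the service request takes the per-instance refresh_cache-<uuid> lock, asks Neutron for fresh data on the changed port, rewrites the instance_info_cache entry, and releases the lock. The sketch below mimics that handler shape with a plain per-instance lock map instead of Nova's real lockutils wiring; refresh_from_neutron is a hypothetical callable.

import threading
from collections import defaultdict

_cache_locks = defaultdict(threading.Lock)   # one lock per "refresh_cache-<uuid>" key
_nw_info_cache = {}                          # instance uuid -> network_info

def handle_network_changed(instance_uuid, port_id, refresh_from_neutron):
    """Refresh the cached network info for one instance under its cache lock."""
    with _cache_locks[f"refresh_cache-{instance_uuid}"]:
        # Re-query Neutron for the changed port and rebuild the cache entry.
        _nw_info_cache[instance_uuid] = refresh_from_neutron(instance_uuid, port_id)
    return _nw_info_cache[instance_uuid]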
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1842.523338] env[62510]: DEBUG oslo_concurrency.lockutils [req-e8389a04-75ba-4c4a-afa1-c7e1f8261652 req-b52ce1cc-fdd1-4d21-830a-c063ffd628ed service nova] Acquiring lock "refresh_cache-313f7916-0737-4e44-ae2f-58301934bf06" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1842.523338] env[62510]: DEBUG oslo_concurrency.lockutils [req-e8389a04-75ba-4c4a-afa1-c7e1f8261652 req-b52ce1cc-fdd1-4d21-830a-c063ffd628ed service nova] Acquired lock "refresh_cache-313f7916-0737-4e44-ae2f-58301934bf06" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1842.523338] env[62510]: DEBUG nova.network.neutron [req-e8389a04-75ba-4c4a-afa1-c7e1f8261652 req-b52ce1cc-fdd1-4d21-830a-c063ffd628ed service nova] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Refreshing network info cache for port b8e2da8e-86d0-4706-bdbb-39da14ef3e15 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1842.526563] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d78c332-d824-4015-b691-ec4c0371df23 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.546346] env[62510]: DEBUG oslo_vmware.api [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52907bd8-7954-4832-3b2d-cda860bac92d, 'name': SearchDatastore_Task, 'duration_secs': 0.019831} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1842.553023] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1842.553023] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] e7e053be-cb88-4ae0-b157-3006211f77d9/e7e053be-cb88-4ae0-b157-3006211f77d9.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1842.553023] env[62510]: DEBUG oslo_concurrency.lockutils [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1842.553023] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1842.553023] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-43e3bfaa-923d-4c1b-8586-cfff40b0d7d5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.555090] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-225269f7-37d8-4960-817f-430921488ad2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.565429] env[62510]: DEBUG oslo_vmware.api [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1842.565429] env[62510]: value = "task-1769358" [ 1842.565429] env[62510]: _type = "Task" [ 1842.565429] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1842.571068] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1842.571325] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1842.572354] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd793794-fae6-43bb-ae16-10476876df50 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.587065] env[62510]: DEBUG oslo_vmware.api [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769358, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.587065] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1842.587065] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52ff2035-5611-9f56-7d8c-b16a1a10627b" [ 1842.587065] env[62510]: _type = "Task" [ 1842.587065] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1842.604915] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52ff2035-5611-9f56-7d8c-b16a1a10627b, 'name': SearchDatastore_Task, 'duration_secs': 0.0144} completed successfully. 
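The CopyVirtualDisk_Task started above for e7e053be sits in the middle of the spawn-from-cache disk sequence that this whole window illustrates: make sure the cache directory exists, copy the cached VMDK to <uuid>/<uuid>.vmdk, extend it, reconfigure the VM to attach the disk, rename, then power on. Below is a hypothetical step list summarizing that ordering (operation names as they appear in the surrounding entries; the manager object for the extend call is inferred), not the real vmops code.

SPAWN_DISK_STEPS = [
    # (vCenter operation, purpose), in the order observed in this log window.
    ('FileManager.MakeDirectory', 'ensure [datastore1] devstack-image-cache_base exists'),
    ('VirtualDiskManager.CopyVirtualDisk_Task', 'copy the cached image VMDK to <uuid>/<uuid>.vmdk'),
    ('VirtualDiskManager.ExtendVirtualDisk_Task', "grow the root disk to the flavor's root_gb"),
    ('VirtualMachine.ReconfigVM_Task', 'attach the copied disk to the new VM'),
    ('VirtualMachine.Rename_Task', 'give the VM its final display name'),
    ('VirtualMachine.PowerOnVM_Task', 'boot the instance'),
]

def describe_spawn_disk_path():
    for operation, purpose in SPAWN_DISK_STEPS:
        print(f"{operation:45s} -> {purpose}")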
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1842.605982] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec0c801c-9523-41aa-9d83-21ca2c624641 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.617362] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1842.617362] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52a5d808-e65f-09d1-4035-17543921d0a0" [ 1842.617362] env[62510]: _type = "Task" [ 1842.617362] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1842.632032] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52a5d808-e65f-09d1-4035-17543921d0a0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.645254] env[62510]: DEBUG nova.network.neutron [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Successfully created port: 4d5c92f8-54e7-4731-bc8e-a3598f21a0b6 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1842.749627] env[62510]: DEBUG nova.compute.manager [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1842.903717] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 641628d1-bb6d-4207-89b9-98014328e028] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1842.906019] env[62510]: INFO nova.compute.manager [-] [instance: 31772dc9-4f04-42df-9e3b-3200cc72c977] Took 1.02 seconds to deallocate network for instance. [ 1843.012533] env[62510]: DEBUG oslo_vmware.api [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Task: {'id': task-1769357, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.072022] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03e98c23-6de0-4571-b30b-c4556f106087 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.078022] env[62510]: DEBUG oslo_vmware.api [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769358, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.085362] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f484e4a9-25bf-4e61-becf-14c2fe97b3ad {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.122224] env[62510]: DEBUG nova.network.neutron [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Successfully updated port: 965c7e31-fbcc-4660-900b-d657b5aa8abb {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1843.128301] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-370e173d-e3d1-49ed-9022-317197ff3e2f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.144023] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6f82856-cb36-42e7-a5fb-8c162a63c7ae {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.148833] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52a5d808-e65f-09d1-4035-17543921d0a0, 'name': SearchDatastore_Task, 'duration_secs': 0.019109} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1843.149137] env[62510]: DEBUG oslo_concurrency.lockutils [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1843.149368] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] bc474f8b-dd3b-4d7a-a8e0-fea5570b3091/bc474f8b-dd3b-4d7a-a8e0-fea5570b3091.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1843.149992] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e1b054f2-89a5-4d44-b69e-22e777fa505a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.161210] env[62510]: DEBUG nova.compute.provider_tree [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1843.166770] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab 
tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1843.166770] env[62510]: value = "task-1769359" [ 1843.166770] env[62510]: _type = "Task" [ 1843.166770] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.182674] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769359, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.412523] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 13cdba63-5db4-419f-9e0b-244832d7866b] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1843.415109] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4becc2e2-7669-4464-85f2-c4f31b985e15 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1843.467054] env[62510]: DEBUG nova.network.neutron [req-e8389a04-75ba-4c4a-afa1-c7e1f8261652 req-b52ce1cc-fdd1-4d21-830a-c063ffd628ed service nova] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Updated VIF entry in instance network info cache for port b8e2da8e-86d0-4706-bdbb-39da14ef3e15. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1843.467054] env[62510]: DEBUG nova.network.neutron [req-e8389a04-75ba-4c4a-afa1-c7e1f8261652 req-b52ce1cc-fdd1-4d21-830a-c063ffd628ed service nova] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Updating instance_info_cache with network_info: [{"id": "b8e2da8e-86d0-4706-bdbb-39da14ef3e15", "address": "fa:16:3e:cd:c0:e6", "network": {"id": "de9186ec-ac4f-4ac0-8499-037f92e28197", "bridge": null, "label": "tempest-DeleteServersTestJSON-164983974-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f878b652f01c48139bfc6996e5e32f5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapb8e2da8e-86", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1843.513652] env[62510]: DEBUG oslo_vmware.api [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Task: {'id': task-1769357, 'name': PowerOnVM_Task, 'duration_secs': 0.716579} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1843.513750] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1843.514224] env[62510]: INFO nova.compute.manager [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Took 7.80 seconds to spawn the instance on the hypervisor. [ 1843.514224] env[62510]: DEBUG nova.compute.manager [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1843.515533] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-080ad043-cb42-4602-8f63-bfd5fe064c5d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.556767] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-811432bd-f3fb-4e21-a894-d67e95487f11 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.566804] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-373bce13-1b98-4e63-a460-d446126bd0ed tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Doing hard reboot of VM {{(pid=62510) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1843.570940] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-cde41566-679f-4623-9a66-185405fef747 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.581059] env[62510]: DEBUG oslo_vmware.api [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769358, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.556507} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1843.582592] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] e7e053be-cb88-4ae0-b157-3006211f77d9/e7e053be-cb88-4ae0-b157-3006211f77d9.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1843.582873] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1843.583171] env[62510]: DEBUG oslo_vmware.api [None req-373bce13-1b98-4e63-a460-d446126bd0ed tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 1843.583171] env[62510]: value = "task-1769360" [ 1843.583171] env[62510]: _type = "Task" [ 1843.583171] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.583462] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4c6265f2-7193-496c-831c-bc7599a0f02c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.601067] env[62510]: DEBUG oslo_vmware.api [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1843.601067] env[62510]: value = "task-1769361" [ 1843.601067] env[62510]: _type = "Task" [ 1843.601067] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.604785] env[62510]: DEBUG oslo_vmware.api [None req-373bce13-1b98-4e63-a460-d446126bd0ed tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769360, 'name': ResetVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.615564] env[62510]: DEBUG oslo_vmware.api [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769361, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.635389] env[62510]: DEBUG oslo_concurrency.lockutils [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Acquiring lock "refresh_cache-2e24b76d-a770-4f1e-a8f1-a54417f1be81" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1843.635645] env[62510]: DEBUG oslo_concurrency.lockutils [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Acquired lock "refresh_cache-2e24b76d-a770-4f1e-a8f1-a54417f1be81" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1843.635777] env[62510]: DEBUG nova.network.neutron [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1843.664421] env[62510]: DEBUG nova.scheduler.client.report [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1843.681629] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769359, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.487894} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1843.681629] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] bc474f8b-dd3b-4d7a-a8e0-fea5570b3091/bc474f8b-dd3b-4d7a-a8e0-fea5570b3091.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1843.681946] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1843.682052] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9273fe98-303c-4a3f-ae79-c379921ea54e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.691217] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1843.691217] env[62510]: value = "task-1769362" [ 1843.691217] env[62510]: _type = "Task" [ 1843.691217] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.701522] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769362, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.760108] env[62510]: DEBUG nova.compute.manager [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1843.790098] env[62510]: DEBUG nova.virt.hardware [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1843.790364] env[62510]: DEBUG nova.virt.hardware [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1843.790564] env[62510]: DEBUG nova.virt.hardware [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1843.790975] env[62510]: DEBUG nova.virt.hardware [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1843.790975] env[62510]: DEBUG nova.virt.hardware [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1843.791150] env[62510]: DEBUG nova.virt.hardware [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1843.791391] env[62510]: DEBUG nova.virt.hardware [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1843.791465] env[62510]: DEBUG nova.virt.hardware [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1843.791757] env[62510]: DEBUG nova.virt.hardware [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1843.791757] env[62510]: DEBUG nova.virt.hardware [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1843.791958] env[62510]: DEBUG nova.virt.hardware [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1843.792867] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2277f693-78a1-48f7-a647-fc2401c754ae {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.803106] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-107db837-ab74-4310-af3a-ade8106b1391 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.916929] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: fa43a538-1aae-4642-8370-70f2a49ca92c] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1843.967717] env[62510]: DEBUG oslo_concurrency.lockutils [req-e8389a04-75ba-4c4a-afa1-c7e1f8261652 req-b52ce1cc-fdd1-4d21-830a-c063ffd628ed service nova] Releasing lock "refresh_cache-313f7916-0737-4e44-ae2f-58301934bf06" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1844.037762] env[62510]: INFO nova.compute.manager [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Took 33.02 seconds to build instance. [ 1844.098048] env[62510]: DEBUG oslo_vmware.api [None req-373bce13-1b98-4e63-a460-d446126bd0ed tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769360, 'name': ResetVM_Task, 'duration_secs': 0.114934} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1844.098396] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-373bce13-1b98-4e63-a460-d446126bd0ed tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Did hard reboot of VM {{(pid=62510) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1844.098613] env[62510]: DEBUG nova.compute.manager [None req-373bce13-1b98-4e63-a460-d446126bd0ed tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1844.099430] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d8586a2-208f-48e8-bdf9-ce7eebedeee9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.118561] env[62510]: DEBUG oslo_vmware.api [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769361, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.111676} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1844.118843] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1844.122017] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88817360-f178-4e59-b172-0f5fa10ee451 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.143297] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] e7e053be-cb88-4ae0-b157-3006211f77d9/e7e053be-cb88-4ae0-b157-3006211f77d9.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1844.145658] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d8216e3f-31f7-48c9-9794-d0a9034e8352 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.170775] env[62510]: DEBUG oslo_vmware.api [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1844.170775] env[62510]: value = "task-1769363" [ 1844.170775] env[62510]: _type = "Task" [ 1844.170775] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1844.172924] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.440s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1844.173608] env[62510]: DEBUG nova.compute.manager [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1844.177104] env[62510]: DEBUG nova.compute.manager [req-ac05c23b-7cd3-4494-a53f-946ffe860360 req-e1d83e1e-2d52-4f87-97d2-64f37b463768 service nova] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Received event network-vif-plugged-4d5c92f8-54e7-4731-bc8e-a3598f21a0b6 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1844.177316] env[62510]: DEBUG oslo_concurrency.lockutils [req-ac05c23b-7cd3-4494-a53f-946ffe860360 req-e1d83e1e-2d52-4f87-97d2-64f37b463768 service nova] Acquiring lock "5f229f78-6c5d-4170-bdd4-c5522b137949-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1844.177559] env[62510]: DEBUG oslo_concurrency.lockutils [req-ac05c23b-7cd3-4494-a53f-946ffe860360 req-e1d83e1e-2d52-4f87-97d2-64f37b463768 service nova] Lock "5f229f78-6c5d-4170-bdd4-c5522b137949-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1844.177731] env[62510]: DEBUG oslo_concurrency.lockutils [req-ac05c23b-7cd3-4494-a53f-946ffe860360 req-e1d83e1e-2d52-4f87-97d2-64f37b463768 service nova] Lock "5f229f78-6c5d-4170-bdd4-c5522b137949-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1844.177898] env[62510]: DEBUG nova.compute.manager [req-ac05c23b-7cd3-4494-a53f-946ffe860360 req-e1d83e1e-2d52-4f87-97d2-64f37b463768 service nova] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] No waiting events found dispatching network-vif-plugged-4d5c92f8-54e7-4731-bc8e-a3598f21a0b6 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1844.178095] env[62510]: WARNING nova.compute.manager [req-ac05c23b-7cd3-4494-a53f-946ffe860360 req-e1d83e1e-2d52-4f87-97d2-64f37b463768 service nova] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Received unexpected event network-vif-plugged-4d5c92f8-54e7-4731-bc8e-a3598f21a0b6 for instance with vm_state building and task_state spawning. 
[ 1844.183072] env[62510]: DEBUG oslo_concurrency.lockutils [None req-adebd63f-8aef-4dfe-a3d9-abd87a7f08d4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.875s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1844.183392] env[62510]: DEBUG oslo_concurrency.lockutils [None req-adebd63f-8aef-4dfe-a3d9-abd87a7f08d4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1844.186169] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e100115f-e0f5-4a23-adf6-107caff81f46 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.258s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1844.186905] env[62510]: DEBUG nova.objects.instance [None req-e100115f-e0f5-4a23-adf6-107caff81f46 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lazy-loading 'resources' on Instance uuid 9fe592c1-e23a-46d5-8952-c181709d93e7 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1844.197731] env[62510]: DEBUG nova.network.neutron [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1844.205884] env[62510]: DEBUG oslo_vmware.api [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769363, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.212057] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769362, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.118809} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1844.212428] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1844.213559] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1bc00b2-944f-448c-82f9-75575fd5095b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.219580] env[62510]: INFO nova.scheduler.client.report [None req-adebd63f-8aef-4dfe-a3d9-abd87a7f08d4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Deleted allocations for instance 241d842d-3dd5-4ac2-a18a-12b9c9fbd340 [ 1844.251788] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] bc474f8b-dd3b-4d7a-a8e0-fea5570b3091/bc474f8b-dd3b-4d7a-a8e0-fea5570b3091.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1844.253940] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6d9248cd-0910-4178-86ed-0f62def3ae68 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.275596] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1844.275596] env[62510]: value = "task-1769364" [ 1844.275596] env[62510]: _type = "Task" [ 1844.275596] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1844.284810] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769364, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.334114] env[62510]: DEBUG nova.network.neutron [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Successfully updated port: 4d5c92f8-54e7-4731-bc8e-a3598f21a0b6 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1844.421394] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 0eb7e2d3-5e12-4f2c-9e36-30f4b637ed2a] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1844.502859] env[62510]: DEBUG nova.network.neutron [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Updating instance_info_cache with network_info: [{"id": "965c7e31-fbcc-4660-900b-d657b5aa8abb", "address": "fa:16:3e:77:a9:23", "network": {"id": "eacdaedf-3a2d-4349-b143-cb9b1d95c822", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1418315988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68579c8354b4431e8ec51575cda77325", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "26472e27-9835-4f87-ab7f-ca24dfee4e83", "external-id": "nsx-vlan-transportzone-335", "segmentation_id": 335, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap965c7e31-fb", "ovs_interfaceid": "965c7e31-fbcc-4660-900b-d657b5aa8abb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1844.540389] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e718ab83-ae5b-41dd-be76-bff65df1a902 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Lock "01204162-bf8e-46e0-bcf4-00df9ed7e7ce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.532s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1844.619672] env[62510]: DEBUG oslo_concurrency.lockutils [None req-373bce13-1b98-4e63-a460-d446126bd0ed tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.898s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1844.680249] env[62510]: DEBUG nova.compute.utils [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Using /dev/sd 
instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1844.683257] env[62510]: DEBUG nova.compute.manager [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1844.683257] env[62510]: DEBUG nova.network.neutron [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1844.699590] env[62510]: DEBUG oslo_vmware.api [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769363, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.720234] env[62510]: DEBUG nova.compute.manager [req-1ecb3a95-b619-4389-900f-18239b4ecdc5 req-5f43cb9a-897b-4230-9a24-80d77e311c5d service nova] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Received event network-vif-plugged-965c7e31-fbcc-4660-900b-d657b5aa8abb {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1844.720890] env[62510]: DEBUG oslo_concurrency.lockutils [req-1ecb3a95-b619-4389-900f-18239b4ecdc5 req-5f43cb9a-897b-4230-9a24-80d77e311c5d service nova] Acquiring lock "2e24b76d-a770-4f1e-a8f1-a54417f1be81-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1844.720890] env[62510]: DEBUG oslo_concurrency.lockutils [req-1ecb3a95-b619-4389-900f-18239b4ecdc5 req-5f43cb9a-897b-4230-9a24-80d77e311c5d service nova] Lock "2e24b76d-a770-4f1e-a8f1-a54417f1be81-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1844.720890] env[62510]: DEBUG oslo_concurrency.lockutils [req-1ecb3a95-b619-4389-900f-18239b4ecdc5 req-5f43cb9a-897b-4230-9a24-80d77e311c5d service nova] Lock "2e24b76d-a770-4f1e-a8f1-a54417f1be81-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1844.721418] env[62510]: DEBUG nova.compute.manager [req-1ecb3a95-b619-4389-900f-18239b4ecdc5 req-5f43cb9a-897b-4230-9a24-80d77e311c5d service nova] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] No waiting events found dispatching network-vif-plugged-965c7e31-fbcc-4660-900b-d657b5aa8abb {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1844.721878] env[62510]: WARNING nova.compute.manager [req-1ecb3a95-b619-4389-900f-18239b4ecdc5 req-5f43cb9a-897b-4230-9a24-80d77e311c5d service nova] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Received unexpected event network-vif-plugged-965c7e31-fbcc-4660-900b-d657b5aa8abb for instance with vm_state building and task_state spawning. 
[ 1844.722362] env[62510]: DEBUG nova.compute.manager [req-1ecb3a95-b619-4389-900f-18239b4ecdc5 req-5f43cb9a-897b-4230-9a24-80d77e311c5d service nova] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Received event network-changed-965c7e31-fbcc-4660-900b-d657b5aa8abb {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1844.722453] env[62510]: DEBUG nova.compute.manager [req-1ecb3a95-b619-4389-900f-18239b4ecdc5 req-5f43cb9a-897b-4230-9a24-80d77e311c5d service nova] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Refreshing instance network info cache due to event network-changed-965c7e31-fbcc-4660-900b-d657b5aa8abb. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1844.722646] env[62510]: DEBUG oslo_concurrency.lockutils [req-1ecb3a95-b619-4389-900f-18239b4ecdc5 req-5f43cb9a-897b-4230-9a24-80d77e311c5d service nova] Acquiring lock "refresh_cache-2e24b76d-a770-4f1e-a8f1-a54417f1be81" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1844.755942] env[62510]: DEBUG oslo_concurrency.lockutils [None req-adebd63f-8aef-4dfe-a3d9-abd87a7f08d4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "241d842d-3dd5-4ac2-a18a-12b9c9fbd340" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.385s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1844.785352] env[62510]: DEBUG nova.policy [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '93399cd69f4245188fd39bde29ee3d5a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '11c021c6b45c452f83732fe578e576f6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1844.793966] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769364, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.844594] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquiring lock "refresh_cache-5f229f78-6c5d-4170-bdd4-c5522b137949" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1844.844977] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquired lock "refresh_cache-5f229f78-6c5d-4170-bdd4-c5522b137949" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1844.845322] env[62510]: DEBUG nova.network.neutron [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1844.926250] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: e7daad63-c802-4a86-bead-7e849064ed61] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1844.988805] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51e5c2cf-e504-49e7-a243-c948fecbc5c3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.998476] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-589ebb95-803b-404f-8fb6-bbdba997f825 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.005949] env[62510]: DEBUG oslo_concurrency.lockutils [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Releasing lock "refresh_cache-2e24b76d-a770-4f1e-a8f1-a54417f1be81" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1845.006384] env[62510]: DEBUG nova.compute.manager [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Instance network_info: |[{"id": "965c7e31-fbcc-4660-900b-d657b5aa8abb", "address": "fa:16:3e:77:a9:23", "network": {"id": "eacdaedf-3a2d-4349-b143-cb9b1d95c822", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1418315988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68579c8354b4431e8ec51575cda77325", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "26472e27-9835-4f87-ab7f-ca24dfee4e83", "external-id": "nsx-vlan-transportzone-335", "segmentation_id": 335, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap965c7e31-fb", "ovs_interfaceid": "965c7e31-fbcc-4660-900b-d657b5aa8abb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1845.006730] env[62510]: DEBUG oslo_concurrency.lockutils [req-1ecb3a95-b619-4389-900f-18239b4ecdc5 req-5f43cb9a-897b-4230-9a24-80d77e311c5d service nova] Acquired lock "refresh_cache-2e24b76d-a770-4f1e-a8f1-a54417f1be81" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1845.006999] env[62510]: DEBUG nova.network.neutron [req-1ecb3a95-b619-4389-900f-18239b4ecdc5 req-5f43cb9a-897b-4230-9a24-80d77e311c5d service nova] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Refreshing network info cache for port 965c7e31-fbcc-4660-900b-d657b5aa8abb {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1845.008500] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:77:a9:23', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '26472e27-9835-4f87-ab7f-ca24dfee4e83', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '965c7e31-fbcc-4660-900b-d657b5aa8abb', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1845.017533] env[62510]: DEBUG oslo.service.loopingcall [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1845.043529] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1845.044064] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1a705c9e-581c-4f45-b621-72feaeb6a09a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.061020] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caeea194-08b3-49b2-ade7-d36fbebd6bb3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.070844] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4216597-cb07-4c57-b96c-41b3011c2209 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.076871] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1845.076871] env[62510]: value = "task-1769365" [ 1845.076871] env[62510]: _type = "Task" [ 1845.076871] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.087388] env[62510]: DEBUG nova.compute.provider_tree [None req-e100115f-e0f5-4a23-adf6-107caff81f46 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1845.094804] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769365, 'name': CreateVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.182729] env[62510]: DEBUG oslo_vmware.api [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769363, 'name': ReconfigVM_Task, 'duration_secs': 0.707786} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.182969] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Reconfigured VM instance instance-0000005f to attach disk [datastore1] e7e053be-cb88-4ae0-b157-3006211f77d9/e7e053be-cb88-4ae0-b157-3006211f77d9.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1845.183639] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-86a465ea-f29f-4504-9b8b-99a373532ec9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.192034] env[62510]: DEBUG oslo_vmware.api [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1845.192034] env[62510]: value = "task-1769366" [ 1845.192034] env[62510]: _type = "Task" [ 1845.192034] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.195751] env[62510]: DEBUG nova.compute.manager [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1845.206337] env[62510]: DEBUG oslo_vmware.api [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769366, 'name': Rename_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.289932] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769364, 'name': ReconfigVM_Task, 'duration_secs': 0.626209} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.290264] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Reconfigured VM instance instance-0000004e to attach disk [datastore1] bc474f8b-dd3b-4d7a-a8e0-fea5570b3091/bc474f8b-dd3b-4d7a-a8e0-fea5570b3091.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1845.291414] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'encryption_secret_uuid': None, 'encrypted': False, 'device_type': 'disk', 'size': 0, 'disk_bus': None, 'guest_format': None, 'encryption_options': None, 'boot_index': 0, 'device_name': '/dev/sda', 'encryption_format': None, 'image_id': '645af513-c243-4722-b631-714f21477ae6'}], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367443', 'volume_id': 'b844eb5c-7644-4bb5-900e-d0a16620fbe8', 'name': 'volume-b844eb5c-7644-4bb5-900e-d0a16620fbe8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bc474f8b-dd3b-4d7a-a8e0-fea5570b3091', 'attached_at': '', 'detached_at': '', 'volume_id': 'b844eb5c-7644-4bb5-900e-d0a16620fbe8', 'serial': 'b844eb5c-7644-4bb5-900e-d0a16620fbe8'}, 'attachment_id': '13f58a82-3e10-4d5d-a082-d867ef1ff1b7', 'mount_device': '/dev/sdb', 'device_type': None, 'disk_bus': None, 'guest_format': None, 'boot_index': None, 'delete_on_termination': False, 'volume_type': None}], 'swap': None} {{(pid=62510) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1845.291654] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Volume attach. 
Driver type: vmdk {{(pid=62510) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1845.291961] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367443', 'volume_id': 'b844eb5c-7644-4bb5-900e-d0a16620fbe8', 'name': 'volume-b844eb5c-7644-4bb5-900e-d0a16620fbe8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bc474f8b-dd3b-4d7a-a8e0-fea5570b3091', 'attached_at': '', 'detached_at': '', 'volume_id': 'b844eb5c-7644-4bb5-900e-d0a16620fbe8', 'serial': 'b844eb5c-7644-4bb5-900e-d0a16620fbe8'} {{(pid=62510) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1845.292730] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-947fd21a-5c45-4c78-b8b0-9fc3ce2aacca {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.310356] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9f64d24-8f73-4f2e-b521-eddcae9afd33 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.126825] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 350d5f83-d9ce-4997-bf57-70c4a4e22ba0] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1846.131081] env[62510]: DEBUG nova.scheduler.client.report [None req-e100115f-e0f5-4a23-adf6-107caff81f46 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1846.145515] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] volume-b844eb5c-7644-4bb5-900e-d0a16620fbe8/volume-b844eb5c-7644-4bb5-900e-d0a16620fbe8.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1846.146457] env[62510]: DEBUG nova.network.neutron [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Successfully created port: f5b21632-114e-43ff-8c8e-a6ff44e674eb {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1846.154456] env[62510]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4840a35a-e856-4581-b251-116bfd66c9ff {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.180211] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769365, 'name': CreateVM_Task, 'duration_secs': 0.47728} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.180211] env[62510]: DEBUG oslo_vmware.api [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769366, 'name': Rename_Task, 'duration_secs': 0.215719} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.181078] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1846.181366] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1846.182490] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1846.182490] env[62510]: value = "task-1769367" [ 1846.182490] env[62510]: _type = "Task" [ 1846.182490] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.183134] env[62510]: DEBUG oslo_concurrency.lockutils [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1846.185035] env[62510]: DEBUG oslo_concurrency.lockutils [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1846.185035] env[62510]: DEBUG oslo_concurrency.lockutils [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1846.185035] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c69405a8-4ac4-43dd-a221-2c588442b24d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.185850] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with 
opID=oslo.vmware-a0797736-9a33-4db3-9bc7-8495fa4a547c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.196089] env[62510]: DEBUG oslo_vmware.api [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for the task: (returnval){ [ 1846.196089] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52369bab-c459-13d8-9589-155bf2b6f7ea" [ 1846.196089] env[62510]: _type = "Task" [ 1846.196089] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.200615] env[62510]: DEBUG oslo_vmware.api [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1846.200615] env[62510]: value = "task-1769368" [ 1846.200615] env[62510]: _type = "Task" [ 1846.200615] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.201072] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769367, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.205643] env[62510]: DEBUG nova.network.neutron [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1846.217176] env[62510]: DEBUG oslo_vmware.api [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52369bab-c459-13d8-9589-155bf2b6f7ea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.222817] env[62510]: DEBUG nova.compute.manager [req-16c92072-f785-4ca4-a5c3-051549dd8ec2 req-bcf31bea-d55f-45ac-b3e3-67e5f5bd827d service nova] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Received event network-changed-4d5c92f8-54e7-4731-bc8e-a3598f21a0b6 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1846.223051] env[62510]: DEBUG nova.compute.manager [req-16c92072-f785-4ca4-a5c3-051549dd8ec2 req-bcf31bea-d55f-45ac-b3e3-67e5f5bd827d service nova] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Refreshing instance network info cache due to event network-changed-4d5c92f8-54e7-4731-bc8e-a3598f21a0b6. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1846.223263] env[62510]: DEBUG oslo_concurrency.lockutils [req-16c92072-f785-4ca4-a5c3-051549dd8ec2 req-bcf31bea-d55f-45ac-b3e3-67e5f5bd827d service nova] Acquiring lock "refresh_cache-5f229f78-6c5d-4170-bdd4-c5522b137949" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1846.223925] env[62510]: DEBUG oslo_vmware.api [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769368, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.364042] env[62510]: DEBUG nova.network.neutron [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Updating instance_info_cache with network_info: [{"id": "4d5c92f8-54e7-4731-bc8e-a3598f21a0b6", "address": "fa:16:3e:ff:14:88", "network": {"id": "3b8d6085-89b4-4ce1-b2d3-a23177f0eb79", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-951886226-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de0d125bba6242d3b9614402098efc1f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d5c92f8-54", "ovs_interfaceid": "4d5c92f8-54e7-4731-bc8e-a3598f21a0b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1846.647838] env[62510]: DEBUG nova.compute.manager [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1846.650720] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e100115f-e0f5-4a23-adf6-107caff81f46 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.465s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1846.653515] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: bd21dd81-c0d9-4ff1-9183-0b4622dc5afb] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1846.655742] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.387s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1846.657689] env[62510]: INFO nova.compute.claims [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1846.683370] env[62510]: DEBUG nova.virt.hardware [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1846.683370] env[62510]: DEBUG nova.virt.hardware [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1846.683549] env[62510]: DEBUG nova.virt.hardware [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1846.683711] env[62510]: DEBUG nova.virt.hardware [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1846.684211] 
env[62510]: DEBUG nova.virt.hardware [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1846.684211] env[62510]: DEBUG nova.virt.hardware [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1846.684506] env[62510]: DEBUG nova.virt.hardware [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1846.684763] env[62510]: DEBUG nova.virt.hardware [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1846.684977] env[62510]: DEBUG nova.virt.hardware [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1846.685162] env[62510]: DEBUG nova.virt.hardware [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1846.685382] env[62510]: DEBUG nova.virt.hardware [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1846.686375] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39745573-309f-4ca6-961c-a74920e9e528 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.703070] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769367, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.705493] env[62510]: INFO nova.scheduler.client.report [None req-e100115f-e0f5-4a23-adf6-107caff81f46 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Deleted allocations for instance 9fe592c1-e23a-46d5-8952-c181709d93e7 [ 1846.713740] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-979e5753-3c82-4dd1-b343-988064e48829 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.747241] env[62510]: DEBUG oslo_vmware.api [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52369bab-c459-13d8-9589-155bf2b6f7ea, 'name': SearchDatastore_Task, 'duration_secs': 0.022285} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.747519] env[62510]: DEBUG oslo_vmware.api [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769368, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.748107] env[62510]: DEBUG oslo_concurrency.lockutils [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1846.748404] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1846.748728] env[62510]: DEBUG oslo_concurrency.lockutils [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1846.748933] env[62510]: DEBUG oslo_concurrency.lockutils [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1846.749174] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) 
mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1846.749455] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d317cd4b-0912-4afe-b043-e2c6a42fd178 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.760626] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1846.760923] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1846.761530] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbd62eed-9f61-42e7-91e6-e31bf433639d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.768953] env[62510]: DEBUG oslo_vmware.api [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for the task: (returnval){ [ 1846.768953] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52edfb3a-c1fc-3966-0e54-a298e9eb6213" [ 1846.768953] env[62510]: _type = "Task" [ 1846.768953] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.779891] env[62510]: DEBUG oslo_vmware.api [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52edfb3a-c1fc-3966-0e54-a298e9eb6213, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.834341] env[62510]: DEBUG nova.network.neutron [req-1ecb3a95-b619-4389-900f-18239b4ecdc5 req-5f43cb9a-897b-4230-9a24-80d77e311c5d service nova] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Updated VIF entry in instance network info cache for port 965c7e31-fbcc-4660-900b-d657b5aa8abb. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1846.834694] env[62510]: DEBUG nova.network.neutron [req-1ecb3a95-b619-4389-900f-18239b4ecdc5 req-5f43cb9a-897b-4230-9a24-80d77e311c5d service nova] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Updating instance_info_cache with network_info: [{"id": "965c7e31-fbcc-4660-900b-d657b5aa8abb", "address": "fa:16:3e:77:a9:23", "network": {"id": "eacdaedf-3a2d-4349-b143-cb9b1d95c822", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1418315988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68579c8354b4431e8ec51575cda77325", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "26472e27-9835-4f87-ab7f-ca24dfee4e83", "external-id": "nsx-vlan-transportzone-335", "segmentation_id": 335, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap965c7e31-fb", "ovs_interfaceid": "965c7e31-fbcc-4660-900b-d657b5aa8abb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1846.867828] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Releasing lock "refresh_cache-5f229f78-6c5d-4170-bdd4-c5522b137949" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1846.867828] env[62510]: DEBUG nova.compute.manager [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Instance network_info: |[{"id": "4d5c92f8-54e7-4731-bc8e-a3598f21a0b6", "address": "fa:16:3e:ff:14:88", "network": {"id": "3b8d6085-89b4-4ce1-b2d3-a23177f0eb79", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-951886226-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de0d125bba6242d3b9614402098efc1f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d5c92f8-54", "ovs_interfaceid": "4d5c92f8-54e7-4731-bc8e-a3598f21a0b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1846.867828] env[62510]: DEBUG oslo_concurrency.lockutils [req-16c92072-f785-4ca4-a5c3-051549dd8ec2 req-bcf31bea-d55f-45ac-b3e3-67e5f5bd827d service nova] Acquired lock "refresh_cache-5f229f78-6c5d-4170-bdd4-c5522b137949" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1846.867828] env[62510]: DEBUG nova.network.neutron [req-16c92072-f785-4ca4-a5c3-051549dd8ec2 req-bcf31bea-d55f-45ac-b3e3-67e5f5bd827d service nova] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Refreshing network info cache for port 4d5c92f8-54e7-4731-bc8e-a3598f21a0b6 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1846.868927] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ff:14:88', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fe38bb7e-8bcb-419d-868f-0dc105c69651', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4d5c92f8-54e7-4731-bc8e-a3598f21a0b6', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1846.876731] env[62510]: DEBUG oslo.service.loopingcall [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1846.877176] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1846.877516] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-941c625b-e00e-45e9-b6f5-a9b1cd7f4b9c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.900760] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1846.900760] env[62510]: value = "task-1769369" [ 1846.900760] env[62510]: _type = "Task" [ 1846.900760] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.909503] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769369, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.163178] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: e3850272-9dae-4164-8f0e-f5513af23f49] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1847.197714] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769367, 'name': ReconfigVM_Task, 'duration_secs': 0.532792} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1847.197936] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Reconfigured VM instance instance-0000004e to attach disk [datastore1] volume-b844eb5c-7644-4bb5-900e-d0a16620fbe8/volume-b844eb5c-7644-4bb5-900e-d0a16620fbe8.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1847.203699] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-050655f7-15e1-4fbc-8203-be6ea95c99f6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.225413] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1847.225413] env[62510]: value = "task-1769370" [ 1847.225413] env[62510]: _type = "Task" [ 1847.225413] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1847.234983] env[62510]: DEBUG oslo_vmware.api [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769368, 'name': PowerOnVM_Task, 'duration_secs': 0.642935} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1847.235512] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e100115f-e0f5-4a23-adf6-107caff81f46 tempest-ImagesTestJSON-1872682908 tempest-ImagesTestJSON-1872682908-project-member] Lock "9fe592c1-e23a-46d5-8952-c181709d93e7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.506s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1847.236899] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1847.237175] env[62510]: INFO nova.compute.manager [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Took 9.04 seconds to spawn the instance on the hypervisor. 
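The records around this point trace the usual vmwareapi spawn path: Folder.CreateVM_Task is invoked, the returned task is polled via wait_for_task/_poll_task until it reports "completed successfully", the VM is renamed (Rename_Task) and finally powered on with VirtualMachine.PowerOnVM_Task. As a rough illustration of the poll loop only, the sketch below re-implements that pattern around a hypothetical get_task_info() callable; it is not the oslo.vmware code itself, and the helper name, poll interval and state strings are assumptions inferred from the "progress is N%" / duration values visible in the log.

    import time

    def wait_for_task(task_ref, get_task_info, poll_interval=0.5):
        # Poll a vCenter task until it reaches a terminal state, mirroring the
        # "progress is N%" lines followed by "completed successfully" above.
        # get_task_info is a hypothetical callable returning an object with
        # .state ('running', 'success' or 'error'), .progress and .error.
        while True:
            info = get_task_info(task_ref)
            if info.state == 'success':
                return info                      # e.g. CreateVM_Task finished
            if info.state == 'error':
                raise RuntimeError(info.error)   # surfaced as a task failure
            # still queued or running: report progress and try again
            print(f"Task {task_ref}: progress is {info.progress}%")
            time.sleep(poll_interval)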
[ 1847.237900] env[62510]: DEBUG nova.compute.manager [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1847.238520] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9192cdf7-a250-4245-a2e1-c76f06dbc085 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.247606] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769370, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.281027] env[62510]: DEBUG oslo_vmware.api [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52edfb3a-c1fc-3966-0e54-a298e9eb6213, 'name': SearchDatastore_Task, 'duration_secs': 0.010701} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1847.281928] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c0c5aaa-9e32-45bf-980c-71bcc731056d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.289253] env[62510]: DEBUG oslo_vmware.api [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for the task: (returnval){ [ 1847.289253] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52291592-1787-d100-9e3d-8adf48cf25bb" [ 1847.289253] env[62510]: _type = "Task" [ 1847.289253] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1847.304061] env[62510]: DEBUG oslo_vmware.api [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52291592-1787-d100-9e3d-8adf48cf25bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.337312] env[62510]: DEBUG oslo_concurrency.lockutils [req-1ecb3a95-b619-4389-900f-18239b4ecdc5 req-5f43cb9a-897b-4230-9a24-80d77e311c5d service nova] Releasing lock "refresh_cache-2e24b76d-a770-4f1e-a8f1-a54417f1be81" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1847.412087] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769369, 'name': CreateVM_Task, 'duration_secs': 0.410758} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1847.412373] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1847.413047] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1847.413252] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1847.413610] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1847.413911] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44054a1c-934a-4aae-b5ec-fe5885446302 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.419864] env[62510]: DEBUG oslo_vmware.api [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1847.419864] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52282dc8-c661-d2f2-52d3-02488bd113c7" [ 1847.419864] env[62510]: _type = "Task" [ 1847.419864] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1847.433098] env[62510]: DEBUG oslo_vmware.api [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52282dc8-c661-d2f2-52d3-02488bd113c7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.669368] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 568ce58c-9ce5-4b40-988f-f31d8e0c376d] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1847.686412] env[62510]: DEBUG nova.network.neutron [req-16c92072-f785-4ca4-a5c3-051549dd8ec2 req-bcf31bea-d55f-45ac-b3e3-67e5f5bd827d service nova] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Updated VIF entry in instance network info cache for port 4d5c92f8-54e7-4731-bc8e-a3598f21a0b6. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1847.686877] env[62510]: DEBUG nova.network.neutron [req-16c92072-f785-4ca4-a5c3-051549dd8ec2 req-bcf31bea-d55f-45ac-b3e3-67e5f5bd827d service nova] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Updating instance_info_cache with network_info: [{"id": "4d5c92f8-54e7-4731-bc8e-a3598f21a0b6", "address": "fa:16:3e:ff:14:88", "network": {"id": "3b8d6085-89b4-4ce1-b2d3-a23177f0eb79", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-951886226-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de0d125bba6242d3b9614402098efc1f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d5c92f8-54", "ovs_interfaceid": "4d5c92f8-54e7-4731-bc8e-a3598f21a0b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1847.740703] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769370, 'name': ReconfigVM_Task, 'duration_secs': 0.268027} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1847.741014] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367443', 'volume_id': 'b844eb5c-7644-4bb5-900e-d0a16620fbe8', 'name': 'volume-b844eb5c-7644-4bb5-900e-d0a16620fbe8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bc474f8b-dd3b-4d7a-a8e0-fea5570b3091', 'attached_at': '', 'detached_at': '', 'volume_id': 'b844eb5c-7644-4bb5-900e-d0a16620fbe8', 'serial': 'b844eb5c-7644-4bb5-900e-d0a16620fbe8'} {{(pid=62510) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1847.742297] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5d6c2d26-acb1-46e9-b13a-bb70cdf69f3c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.751286] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1847.751286] env[62510]: value = "task-1769371" [ 1847.751286] env[62510]: _type = "Task" [ 1847.751286] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1847.765452] env[62510]: INFO nova.compute.manager [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Took 35.91 seconds to build instance. [ 1847.766742] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769371, 'name': Rename_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.803112] env[62510]: DEBUG oslo_vmware.api [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52291592-1787-d100-9e3d-8adf48cf25bb, 'name': SearchDatastore_Task, 'duration_secs': 0.012788} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1847.803440] env[62510]: DEBUG oslo_concurrency.lockutils [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1847.803759] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 2e24b76d-a770-4f1e-a8f1-a54417f1be81/2e24b76d-a770-4f1e-a8f1-a54417f1be81.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1847.804035] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-21e356a5-05b0-43a9-a26c-59a92dd5d126 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.812392] env[62510]: DEBUG oslo_vmware.api [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for the task: (returnval){ [ 1847.812392] env[62510]: value = "task-1769372" [ 1847.812392] env[62510]: _type = "Task" [ 1847.812392] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1847.825283] env[62510]: DEBUG oslo_vmware.api [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769372, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.918200] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e33d8a5-1d1f-46d3-a834-5dd5363f3ed6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.929335] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95a6dbea-7fec-443c-b0c1-6af77e5b9b9d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.936825] env[62510]: DEBUG oslo_vmware.api [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52282dc8-c661-d2f2-52d3-02488bd113c7, 'name': SearchDatastore_Task, 'duration_secs': 0.012652} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1847.937602] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1847.937858] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1847.938130] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1847.938282] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1847.938472] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1847.938753] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-33d0a532-a883-460b-8a5d-fc7fc932064c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.970467] env[62510]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-684a1f80-ec9c-400d-8c4c-4f9178227d7c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.979677] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bdd1261-0198-4a82-b50d-8b78983f6f87 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.984394] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1847.984586] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1847.985339] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e63dec18-c02f-4eda-b8dd-6bde7ef68988 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.998857] env[62510]: DEBUG nova.compute.provider_tree [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1848.004237] env[62510]: DEBUG oslo_vmware.api [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1848.004237] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52c5d83b-ef50-fa61-7071-09fe44cee544" [ 1848.004237] env[62510]: _type = "Task" [ 1848.004237] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.012926] env[62510]: DEBUG oslo_vmware.api [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52c5d83b-ef50-fa61-7071-09fe44cee544, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.175262] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: fae7e580-ab09-4fda-9cbe-0e066ddcb85c] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1848.194022] env[62510]: DEBUG oslo_concurrency.lockutils [req-16c92072-f785-4ca4-a5c3-051549dd8ec2 req-bcf31bea-d55f-45ac-b3e3-67e5f5bd827d service nova] Releasing lock "refresh_cache-5f229f78-6c5d-4170-bdd4-c5522b137949" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1848.194022] env[62510]: DEBUG nova.compute.manager [req-16c92072-f785-4ca4-a5c3-051549dd8ec2 req-bcf31bea-d55f-45ac-b3e3-67e5f5bd827d service nova] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Received event network-changed-53f991bc-ec2f-434b-8943-f8e6d891b608 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1848.194022] env[62510]: DEBUG nova.compute.manager [req-16c92072-f785-4ca4-a5c3-051549dd8ec2 req-bcf31bea-d55f-45ac-b3e3-67e5f5bd827d service nova] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Refreshing instance network info cache due to event network-changed-53f991bc-ec2f-434b-8943-f8e6d891b608. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1848.194022] env[62510]: DEBUG oslo_concurrency.lockutils [req-16c92072-f785-4ca4-a5c3-051549dd8ec2 req-bcf31bea-d55f-45ac-b3e3-67e5f5bd827d service nova] Acquiring lock "refresh_cache-01204162-bf8e-46e0-bcf4-00df9ed7e7ce" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1848.194022] env[62510]: DEBUG oslo_concurrency.lockutils [req-16c92072-f785-4ca4-a5c3-051549dd8ec2 req-bcf31bea-d55f-45ac-b3e3-67e5f5bd827d service nova] Acquired lock "refresh_cache-01204162-bf8e-46e0-bcf4-00df9ed7e7ce" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1848.194022] env[62510]: DEBUG nova.network.neutron [req-16c92072-f785-4ca4-a5c3-051549dd8ec2 req-bcf31bea-d55f-45ac-b3e3-67e5f5bd827d service nova] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Refreshing network info cache for port 53f991bc-ec2f-434b-8943-f8e6d891b608 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1848.262745] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769371, 'name': Rename_Task, 'duration_secs': 0.221442} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1848.263075] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1848.263345] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-47c042a5-168a-4235-8233-d46f854179bd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.267965] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2da46409-3520-4557-aa7c-e0c6c62f0749 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "e7e053be-cb88-4ae0-b157-3006211f77d9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.422s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1848.273775] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1848.273775] env[62510]: value = "task-1769373" [ 1848.273775] env[62510]: _type = "Task" [ 1848.273775] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.286704] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769373, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.325945] env[62510]: DEBUG oslo_vmware.api [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769372, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.331513] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1848.331752] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1848.471192] env[62510]: DEBUG oslo_vmware.rw_handles [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524dfec6-6a71-4196-fed0-3969596804d7/disk-0.vmdk. {{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1848.472228] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78435848-870d-4deb-b3ad-0fe56dc7c393 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.479672] env[62510]: DEBUG oslo_vmware.rw_handles [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524dfec6-6a71-4196-fed0-3969596804d7/disk-0.vmdk is in state: ready. {{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1848.479840] env[62510]: ERROR oslo_vmware.rw_handles [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524dfec6-6a71-4196-fed0-3969596804d7/disk-0.vmdk due to incomplete transfer. [ 1848.480088] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-2e710d10-2f89-4908-82bc-d5b34cb8b6f7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.488222] env[62510]: DEBUG oslo_vmware.rw_handles [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/524dfec6-6a71-4196-fed0-3969596804d7/disk-0.vmdk. 
{{(pid=62510) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1848.488519] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Uploaded image 2c220807-cc2e-49b9-90e8-39effcef5fb1 to the Glance image server {{(pid=62510) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1848.495503] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Destroying the VM {{(pid=62510) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1848.495765] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-4b33c42c-9dff-498a-83a2-6e0b4560c8cd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.503514] env[62510]: DEBUG nova.scheduler.client.report [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1848.521719] env[62510]: DEBUG oslo_vmware.api [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 1848.521719] env[62510]: value = "task-1769374" [ 1848.521719] env[62510]: _type = "Task" [ 1848.521719] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.533688] env[62510]: DEBUG oslo_vmware.api [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52c5d83b-ef50-fa61-7071-09fe44cee544, 'name': SearchDatastore_Task, 'duration_secs': 0.068829} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1848.535235] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62453775-e166-4d80-9cfc-959cf167919c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.541858] env[62510]: DEBUG oslo_vmware.api [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769374, 'name': Destroy_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.545828] env[62510]: DEBUG oslo_vmware.api [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1848.545828] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52f65aa3-5ac8-6051-029e-8f7324092052" [ 1848.545828] env[62510]: _type = "Task" [ 1848.545828] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.554935] env[62510]: DEBUG oslo_vmware.api [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52f65aa3-5ac8-6051-029e-8f7324092052, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.679055] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: ff261d7d-40c4-4db2-8ce3-3aaf28b48ad3] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1848.784940] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769373, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.822015] env[62510]: DEBUG nova.compute.manager [req-6c569c9f-5ef3-4e1d-a9df-06ee8e4dea62 req-8712e15c-df03-420a-8811-1cb367b1f441 service nova] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Received event network-vif-plugged-f5b21632-114e-43ff-8c8e-a6ff44e674eb {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1848.822226] env[62510]: DEBUG oslo_concurrency.lockutils [req-6c569c9f-5ef3-4e1d-a9df-06ee8e4dea62 req-8712e15c-df03-420a-8811-1cb367b1f441 service nova] Acquiring lock "6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1848.822502] env[62510]: DEBUG oslo_concurrency.lockutils [req-6c569c9f-5ef3-4e1d-a9df-06ee8e4dea62 req-8712e15c-df03-420a-8811-1cb367b1f441 service nova] Lock "6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1848.822638] env[62510]: DEBUG oslo_concurrency.lockutils [req-6c569c9f-5ef3-4e1d-a9df-06ee8e4dea62 req-8712e15c-df03-420a-8811-1cb367b1f441 service nova] Lock "6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1848.822798] env[62510]: DEBUG nova.compute.manager [req-6c569c9f-5ef3-4e1d-a9df-06ee8e4dea62 req-8712e15c-df03-420a-8811-1cb367b1f441 service nova] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] No waiting events found 
dispatching network-vif-plugged-f5b21632-114e-43ff-8c8e-a6ff44e674eb {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1848.822956] env[62510]: WARNING nova.compute.manager [req-6c569c9f-5ef3-4e1d-a9df-06ee8e4dea62 req-8712e15c-df03-420a-8811-1cb367b1f441 service nova] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Received unexpected event network-vif-plugged-f5b21632-114e-43ff-8c8e-a6ff44e674eb for instance with vm_state building and task_state spawning. [ 1848.831274] env[62510]: DEBUG oslo_vmware.api [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769372, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.519309} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1848.831274] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 2e24b76d-a770-4f1e-a8f1-a54417f1be81/2e24b76d-a770-4f1e-a8f1-a54417f1be81.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1848.831274] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1848.831274] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7261f8d3-3c24-4fa8-b54b-1a973572f988 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.834799] env[62510]: DEBUG nova.compute.manager [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1848.838588] env[62510]: DEBUG oslo_vmware.api [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for the task: (returnval){ [ 1848.838588] env[62510]: value = "task-1769375" [ 1848.838588] env[62510]: _type = "Task" [ 1848.838588] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.847651] env[62510]: DEBUG oslo_vmware.api [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769375, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.865796] env[62510]: DEBUG nova.network.neutron [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Successfully updated port: f5b21632-114e-43ff-8c8e-a6ff44e674eb {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1849.023657] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.368s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1849.024216] env[62510]: DEBUG nova.compute.manager [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1849.027263] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0de3ec7b-ac20-44d7-92d2-c771538d1888 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.915s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1849.027603] env[62510]: DEBUG nova.objects.instance [None req-0de3ec7b-ac20-44d7-92d2-c771538d1888 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lazy-loading 'resources' on Instance uuid ebd2dc4b-8d74-47db-861e-870d41a4150b {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1849.040794] env[62510]: DEBUG oslo_vmware.api [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769374, 'name': Destroy_Task, 'duration_secs': 0.48836} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1849.041055] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Destroyed the VM [ 1849.041294] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Deleting Snapshot of the VM instance {{(pid=62510) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1849.041583] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-69b52116-2248-4b7a-8909-16f713b576fc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.051768] env[62510]: DEBUG oslo_vmware.api [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 1849.051768] env[62510]: value = "task-1769376" [ 1849.051768] env[62510]: _type = "Task" [ 1849.051768] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1849.060222] env[62510]: DEBUG oslo_vmware.api [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52f65aa3-5ac8-6051-029e-8f7324092052, 'name': SearchDatastore_Task, 'duration_secs': 0.011394} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1849.061104] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1849.061424] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 5f229f78-6c5d-4170-bdd4-c5522b137949/5f229f78-6c5d-4170-bdd4-c5522b137949.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1849.061775] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d02101e9-46b8-47a3-aa69-e3de825a0ee1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.067590] env[62510]: DEBUG oslo_vmware.api [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769376, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.075572] env[62510]: DEBUG oslo_vmware.api [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1849.075572] env[62510]: value = "task-1769377" [ 1849.075572] env[62510]: _type = "Task" [ 1849.075572] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1849.086877] env[62510]: DEBUG oslo_vmware.api [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769377, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.183011] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 87d1d75e-41c4-42e6-bf58-deabb71400e1] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1849.290718] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769373, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.366356] env[62510]: DEBUG oslo_vmware.api [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769375, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.232158} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1849.366356] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1849.367056] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44ce131b-93ec-42a4-8fda-a6175969fa6d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.372036] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquiring lock "refresh_cache-6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1849.372447] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquired lock "refresh_cache-6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1849.372880] env[62510]: DEBUG nova.network.neutron [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1849.375975] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1849.402019] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] 2e24b76d-a770-4f1e-a8f1-a54417f1be81/2e24b76d-a770-4f1e-a8f1-a54417f1be81.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1849.403373] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fd052a29-2227-4897-b7e4-5bc707db47bd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.429032] env[62510]: DEBUG oslo_vmware.api [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for the task: (returnval){ [ 1849.429032] env[62510]: value = "task-1769378" [ 1849.429032] env[62510]: _type = "Task" [ 1849.429032] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1849.438848] env[62510]: DEBUG oslo_vmware.api [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769378, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.540104] env[62510]: DEBUG nova.compute.utils [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1849.540104] env[62510]: DEBUG nova.compute.manager [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1849.540104] env[62510]: DEBUG nova.network.neutron [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1849.568266] env[62510]: DEBUG oslo_vmware.api [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769376, 'name': RemoveSnapshot_Task} progress is 65%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.589616] env[62510]: DEBUG oslo_vmware.api [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769377, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.640668] env[62510]: DEBUG nova.network.neutron [req-16c92072-f785-4ca4-a5c3-051549dd8ec2 req-bcf31bea-d55f-45ac-b3e3-67e5f5bd827d service nova] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Updated VIF entry in instance network info cache for port 53f991bc-ec2f-434b-8943-f8e6d891b608. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1849.640668] env[62510]: DEBUG nova.network.neutron [req-16c92072-f785-4ca4-a5c3-051549dd8ec2 req-bcf31bea-d55f-45ac-b3e3-67e5f5bd827d service nova] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Updating instance_info_cache with network_info: [{"id": "53f991bc-ec2f-434b-8943-f8e6d891b608", "address": "fa:16:3e:0e:3a:9b", "network": {"id": "7d511bb5-50cd-4a86-94d2-efb9fbf27e48", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-70351339-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.191", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "69511bceaf9c432c8819574d05584f09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53f991bc-ec", "ovs_interfaceid": "53f991bc-ec2f-434b-8943-f8e6d891b608", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1849.690143] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: f9eb5110-28ec-474e-b80e-0bfcee51483d] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1849.696436] env[62510]: DEBUG nova.policy [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0ac183a237d24bc2a644cc3ba05d7f1b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '59300e0f20144d9f88b78f7c971e86c9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1849.787512] env[62510]: DEBUG oslo_vmware.api [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769373, 'name': PowerOnVM_Task, 'duration_secs': 1.128969} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1849.790325] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1849.790679] env[62510]: DEBUG nova.compute.manager [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1849.791738] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7108d5e-8836-4a93-bf0c-715a5cc09ac3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.802689] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ace8adc4-561e-4cb8-b740-653a747ff14f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.810414] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ee6dcc3-60ca-40e6-9bfd-173fc6dbc207 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.843792] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e775e84c-55d6-4150-9ff1-da4bfcebe276 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.852845] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa50f4fd-8f1d-49ce-8685-c44ce0c7ef22 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.867495] env[62510]: DEBUG nova.compute.provider_tree [None req-0de3ec7b-ac20-44d7-92d2-c771538d1888 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1849.939248] env[62510]: DEBUG oslo_vmware.api [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769378, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.977400] env[62510]: DEBUG nova.network.neutron [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Instance cache missing network info. 
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1850.047017] env[62510]: DEBUG nova.compute.manager [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1850.064373] env[62510]: DEBUG oslo_vmware.api [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769376, 'name': RemoveSnapshot_Task, 'duration_secs': 0.600176} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1850.064878] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Deleted Snapshot of the VM instance {{(pid=62510) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1850.065258] env[62510]: INFO nova.compute.manager [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Took 13.39 seconds to snapshot the instance on the hypervisor. [ 1850.087608] env[62510]: DEBUG oslo_vmware.api [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769377, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.542816} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1850.087608] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 5f229f78-6c5d-4170-bdd4-c5522b137949/5f229f78-6c5d-4170-bdd4-c5522b137949.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1850.087608] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1850.087608] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-caca2d55-03c3-4e36-b9cd-45f6eb5299ae {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.095268] env[62510]: DEBUG oslo_vmware.api [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1850.095268] env[62510]: value = "task-1769379" [ 1850.095268] env[62510]: _type = "Task" [ 1850.095268] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1850.105481] env[62510]: DEBUG oslo_vmware.api [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769379, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1850.142459] env[62510]: DEBUG oslo_concurrency.lockutils [req-16c92072-f785-4ca4-a5c3-051549dd8ec2 req-bcf31bea-d55f-45ac-b3e3-67e5f5bd827d service nova] Releasing lock "refresh_cache-01204162-bf8e-46e0-bcf4-00df9ed7e7ce" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1850.198091] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 9d5d29ea-be92-4881-9fc8-fea3f2f442d0] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1850.327720] env[62510]: DEBUG oslo_concurrency.lockutils [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1850.374259] env[62510]: DEBUG nova.scheduler.client.report [None req-0de3ec7b-ac20-44d7-92d2-c771538d1888 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1850.411142] env[62510]: DEBUG nova.network.neutron [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Updating instance_info_cache with network_info: [{"id": "f5b21632-114e-43ff-8c8e-a6ff44e674eb", "address": "fa:16:3e:2a:d0:51", "network": {"id": "e420cc26-6a46-4189-b24c-78c39b6b4d50", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-234097015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11c021c6b45c452f83732fe578e576f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6eb7e3e9-5cc2-40f1-a6eb-f70f06531667", "external-id": "nsx-vlan-transportzone-938", "segmentation_id": 938, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5b21632-11", "ovs_interfaceid": "f5b21632-114e-43ff-8c8e-a6ff44e674eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1850.445227] env[62510]: DEBUG oslo_vmware.api [None 
req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769378, 'name': ReconfigVM_Task, 'duration_secs': 0.86409} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1850.445811] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Reconfigured VM instance instance-00000060 to attach disk [datastore1] 2e24b76d-a770-4f1e-a8f1-a54417f1be81/2e24b76d-a770-4f1e-a8f1-a54417f1be81.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1850.446850] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9f5afe83-b9ef-48e9-b21b-b8b303741095 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.459318] env[62510]: DEBUG oslo_vmware.api [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for the task: (returnval){ [ 1850.459318] env[62510]: value = "task-1769380" [ 1850.459318] env[62510]: _type = "Task" [ 1850.459318] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1850.475994] env[62510]: DEBUG oslo_vmware.api [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769380, 'name': Rename_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1850.539247] env[62510]: DEBUG nova.network.neutron [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Successfully created port: 68c246e2-7126-4f5b-bc52-3c63f14aacf5 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1850.607162] env[62510]: DEBUG oslo_vmware.api [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769379, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066071} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1850.607425] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1850.608301] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f29634f6-4601-4e74-8d6c-4db034c8e8e9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.636388] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] 5f229f78-6c5d-4170-bdd4-c5522b137949/5f229f78-6c5d-4170-bdd4-c5522b137949.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1850.639278] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cd7221a6-b5f6-4e17-932f-403d301bf08d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.669633] env[62510]: DEBUG nova.compute.manager [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Found 3 images (rotation: 2) {{(pid=62510) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4924}} [ 1850.669836] env[62510]: DEBUG nova.compute.manager [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Rotating out 1 backups {{(pid=62510) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4932}} [ 1850.670154] env[62510]: DEBUG nova.compute.manager [None req-f72e487a-da5f-4099-b625-0de179918570 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Deleting image 2bd1ac76-8500-47cd-b52c-8213b9025fc0 {{(pid=62510) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4937}} [ 1850.673515] env[62510]: DEBUG oslo_vmware.api [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1850.673515] env[62510]: value = "task-1769381" [ 1850.673515] env[62510]: _type = "Task" [ 1850.673515] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1850.684924] env[62510]: DEBUG oslo_vmware.api [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769381, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1850.699900] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 2c5d137d-4fd5-4035-a04f-bdb76e90edd7] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1850.748328] env[62510]: DEBUG oslo_concurrency.lockutils [None req-892a0028-30de-442b-a4f9-82c0483972c4 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "e7e053be-cb88-4ae0-b157-3006211f77d9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1850.748739] env[62510]: DEBUG oslo_concurrency.lockutils [None req-892a0028-30de-442b-a4f9-82c0483972c4 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "e7e053be-cb88-4ae0-b157-3006211f77d9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1850.749450] env[62510]: DEBUG oslo_concurrency.lockutils [None req-892a0028-30de-442b-a4f9-82c0483972c4 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "e7e053be-cb88-4ae0-b157-3006211f77d9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1850.749450] env[62510]: DEBUG oslo_concurrency.lockutils [None req-892a0028-30de-442b-a4f9-82c0483972c4 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "e7e053be-cb88-4ae0-b157-3006211f77d9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1850.749450] env[62510]: DEBUG oslo_concurrency.lockutils [None req-892a0028-30de-442b-a4f9-82c0483972c4 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "e7e053be-cb88-4ae0-b157-3006211f77d9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1850.751619] env[62510]: INFO nova.compute.manager [None req-892a0028-30de-442b-a4f9-82c0483972c4 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Terminating instance [ 1850.882569] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0de3ec7b-ac20-44d7-92d2-c771538d1888 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.854s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1850.884787] env[62510]: DEBUG oslo_concurrency.lockutils [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.109s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1850.886397] env[62510]: DEBUG nova.objects.instance [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lazy-loading 'resources' on Instance uuid 313f7916-0737-4e44-ae2f-58301934bf06 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1850.896591] env[62510]: DEBUG nova.compute.manager [req-9e674cde-a4d4-4a6f-adfa-9fd29edb3a8e req-71b6631f-86e2-48e0-bf82-5f8c9eea4b60 service nova] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Received event network-changed-f5b21632-114e-43ff-8c8e-a6ff44e674eb {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1850.896591] env[62510]: DEBUG nova.compute.manager [req-9e674cde-a4d4-4a6f-adfa-9fd29edb3a8e req-71b6631f-86e2-48e0-bf82-5f8c9eea4b60 service nova] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Refreshing instance network info cache due to event network-changed-f5b21632-114e-43ff-8c8e-a6ff44e674eb. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1850.896591] env[62510]: DEBUG oslo_concurrency.lockutils [req-9e674cde-a4d4-4a6f-adfa-9fd29edb3a8e req-71b6631f-86e2-48e0-bf82-5f8c9eea4b60 service nova] Acquiring lock "refresh_cache-6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1850.915580] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Releasing lock "refresh_cache-6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1850.915736] env[62510]: DEBUG nova.compute.manager [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Instance network_info: |[{"id": "f5b21632-114e-43ff-8c8e-a6ff44e674eb", "address": "fa:16:3e:2a:d0:51", "network": {"id": "e420cc26-6a46-4189-b24c-78c39b6b4d50", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-234097015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11c021c6b45c452f83732fe578e576f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6eb7e3e9-5cc2-40f1-a6eb-f70f06531667", "external-id": "nsx-vlan-transportzone-938", "segmentation_id": 938, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5b21632-11", "ovs_interfaceid": "f5b21632-114e-43ff-8c8e-a6ff44e674eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 
1850.916400] env[62510]: DEBUG oslo_concurrency.lockutils [req-9e674cde-a4d4-4a6f-adfa-9fd29edb3a8e req-71b6631f-86e2-48e0-bf82-5f8c9eea4b60 service nova] Acquired lock "refresh_cache-6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1850.916687] env[62510]: DEBUG nova.network.neutron [req-9e674cde-a4d4-4a6f-adfa-9fd29edb3a8e req-71b6631f-86e2-48e0-bf82-5f8c9eea4b60 service nova] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Refreshing network info cache for port f5b21632-114e-43ff-8c8e-a6ff44e674eb {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1850.918908] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2a:d0:51', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6eb7e3e9-5cc2-40f1-a6eb-f70f06531667', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f5b21632-114e-43ff-8c8e-a6ff44e674eb', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1850.927442] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Creating folder: Project (11c021c6b45c452f83732fe578e576f6). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1850.929134] env[62510]: INFO nova.scheduler.client.report [None req-0de3ec7b-ac20-44d7-92d2-c771538d1888 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Deleted allocations for instance ebd2dc4b-8d74-47db-861e-870d41a4150b [ 1850.933593] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3df77000-3885-46e6-8c32-0e30bc3fffe7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.951783] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Created folder: Project (11c021c6b45c452f83732fe578e576f6) in parent group-v367197. [ 1850.952057] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Creating folder: Instances. Parent ref: group-v367456. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1850.952347] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e97a4fb8-4cfa-4e80-9962-ac3dd9961e48 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.965164] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Created folder: Instances in parent group-v367456. 
[ 1850.965686] env[62510]: DEBUG oslo.service.loopingcall [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1850.969136] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1850.969910] env[62510]: DEBUG oslo_vmware.api [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769380, 'name': Rename_Task, 'duration_secs': 0.267009} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1850.970170] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7591e36d-d6a9-4654-b93b-4687c0b27649 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.989383] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1850.990161] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-64f7dfa3-d0aa-4ca1-a92f-16a3a9bda4e4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.998696] env[62510]: DEBUG oslo_vmware.api [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for the task: (returnval){ [ 1850.998696] env[62510]: value = "task-1769384" [ 1850.998696] env[62510]: _type = "Task" [ 1850.998696] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1851.000192] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1851.000192] env[62510]: value = "task-1769385" [ 1851.000192] env[62510]: _type = "Task" [ 1851.000192] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1851.020292] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769385, 'name': CreateVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.023620] env[62510]: DEBUG oslo_vmware.api [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769384, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.056061] env[62510]: DEBUG nova.compute.manager [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1851.091518] env[62510]: DEBUG nova.virt.hardware [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1851.091826] env[62510]: DEBUG nova.virt.hardware [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1851.092021] env[62510]: DEBUG nova.virt.hardware [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1851.092225] env[62510]: DEBUG nova.virt.hardware [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1851.092668] env[62510]: DEBUG nova.virt.hardware [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1851.092668] env[62510]: DEBUG nova.virt.hardware [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1851.092785] env[62510]: DEBUG nova.virt.hardware [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1851.093022] 
env[62510]: DEBUG nova.virt.hardware [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1851.093141] env[62510]: DEBUG nova.virt.hardware [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1851.093278] env[62510]: DEBUG nova.virt.hardware [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1851.093457] env[62510]: DEBUG nova.virt.hardware [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1851.094433] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-699325ae-8a76-4a90-8dd5-5d25db9dd90c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.103716] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-426b09c0-ae9b-4494-926f-56f03cfeac21 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.188836] env[62510]: DEBUG oslo_vmware.api [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769381, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.203785] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 2c5c38c1-511f-4aae-969a-eb6de128fae7] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1851.256200] env[62510]: DEBUG nova.compute.manager [None req-892a0028-30de-442b-a4f9-82c0483972c4 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1851.256473] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-892a0028-30de-442b-a4f9-82c0483972c4 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1851.257459] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72ccf0f8-cd74-4946-bee1-303310c4b4a8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.266937] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-892a0028-30de-442b-a4f9-82c0483972c4 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1851.267232] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-58ef08f7-6456-4ca6-a314-0082ed54c4c1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.276061] env[62510]: DEBUG oslo_vmware.api [None req-892a0028-30de-442b-a4f9-82c0483972c4 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1851.276061] env[62510]: value = "task-1769386" [ 1851.276061] env[62510]: _type = "Task" [ 1851.276061] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1851.286948] env[62510]: DEBUG oslo_vmware.api [None req-892a0028-30de-442b-a4f9-82c0483972c4 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769386, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.394723] env[62510]: DEBUG nova.objects.instance [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lazy-loading 'numa_topology' on Instance uuid 313f7916-0737-4e44-ae2f-58301934bf06 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1851.443927] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0de3ec7b-ac20-44d7-92d2-c771538d1888 tempest-ServerDiskConfigTestJSON-921990528 tempest-ServerDiskConfigTestJSON-921990528-project-member] Lock "ebd2dc4b-8d74-47db-861e-870d41a4150b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.873s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1851.510373] env[62510]: DEBUG oslo_vmware.api [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769384, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.517157] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769385, 'name': CreateVM_Task, 'duration_secs': 0.421976} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1851.517157] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1851.517711] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1851.518482] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1851.518482] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1851.518482] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-664808cc-aa02-49fa-8b9a-2d53115e6d46 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.525091] env[62510]: DEBUG oslo_vmware.api [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 1851.525091] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]524da66b-186e-9bff-be84-abe871ce35f7" [ 1851.525091] env[62510]: _type = "Task" [ 1851.525091] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1851.532180] env[62510]: DEBUG oslo_vmware.api [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]524da66b-186e-9bff-be84-abe871ce35f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.687434] env[62510]: DEBUG oslo_vmware.api [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769381, 'name': ReconfigVM_Task, 'duration_secs': 0.549573} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1851.688032] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Reconfigured VM instance instance-00000061 to attach disk [datastore1] 5f229f78-6c5d-4170-bdd4-c5522b137949/5f229f78-6c5d-4170-bdd4-c5522b137949.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1851.688545] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-44156dce-f908-401f-9f04-286e2c231867 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.696833] env[62510]: DEBUG oslo_vmware.api [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1851.696833] env[62510]: value = "task-1769387" [ 1851.696833] env[62510]: _type = "Task" [ 1851.696833] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1851.710796] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 83fa0d32-18ee-401d-af0b-a0adb538e5f4] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1851.712750] env[62510]: DEBUG oslo_vmware.api [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769387, 'name': Rename_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.788547] env[62510]: DEBUG oslo_vmware.api [None req-892a0028-30de-442b-a4f9-82c0483972c4 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769386, 'name': PowerOffVM_Task, 'duration_secs': 0.221263} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1851.790234] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-892a0028-30de-442b-a4f9-82c0483972c4 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1851.790234] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-892a0028-30de-442b-a4f9-82c0483972c4 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1851.790234] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e1bc79b2-db70-4f12-bf7e-cc9776cedd22 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.884163] env[62510]: DEBUG nova.network.neutron [req-9e674cde-a4d4-4a6f-adfa-9fd29edb3a8e req-71b6631f-86e2-48e0-bf82-5f8c9eea4b60 service nova] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Updated VIF entry in instance network info cache for port f5b21632-114e-43ff-8c8e-a6ff44e674eb. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1851.884568] env[62510]: DEBUG nova.network.neutron [req-9e674cde-a4d4-4a6f-adfa-9fd29edb3a8e req-71b6631f-86e2-48e0-bf82-5f8c9eea4b60 service nova] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Updating instance_info_cache with network_info: [{"id": "f5b21632-114e-43ff-8c8e-a6ff44e674eb", "address": "fa:16:3e:2a:d0:51", "network": {"id": "e420cc26-6a46-4189-b24c-78c39b6b4d50", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-234097015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11c021c6b45c452f83732fe578e576f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6eb7e3e9-5cc2-40f1-a6eb-f70f06531667", "external-id": "nsx-vlan-transportzone-938", "segmentation_id": 938, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5b21632-11", "ovs_interfaceid": "f5b21632-114e-43ff-8c8e-a6ff44e674eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1851.899026] env[62510]: DEBUG nova.objects.base [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Object Instance<313f7916-0737-4e44-ae2f-58301934bf06> lazy-loaded attributes: resources,numa_topology {{(pid=62510) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1851.950407] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-892a0028-30de-442b-a4f9-82c0483972c4 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 
e7e053be-cb88-4ae0-b157-3006211f77d9] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1851.950939] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-892a0028-30de-442b-a4f9-82c0483972c4 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1851.950939] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-892a0028-30de-442b-a4f9-82c0483972c4 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Deleting the datastore file [datastore1] e7e053be-cb88-4ae0-b157-3006211f77d9 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1851.951203] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8259ee73-fca1-48ff-a95b-beca6dfc2d35 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.959988] env[62510]: DEBUG oslo_vmware.api [None req-892a0028-30de-442b-a4f9-82c0483972c4 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1851.959988] env[62510]: value = "task-1769389" [ 1851.959988] env[62510]: _type = "Task" [ 1851.959988] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1851.968716] env[62510]: DEBUG oslo_vmware.api [None req-892a0028-30de-442b-a4f9-82c0483972c4 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769389, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.013215] env[62510]: DEBUG oslo_vmware.api [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769384, 'name': PowerOnVM_Task, 'duration_secs': 0.718855} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1852.013499] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1852.013709] env[62510]: INFO nova.compute.manager [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Took 10.31 seconds to spawn the instance on the hypervisor. 
[ 1852.013886] env[62510]: DEBUG nova.compute.manager [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1852.014721] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbb0b82f-2438-49f8-8aa7-008ad5215b73 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.038470] env[62510]: DEBUG oslo_vmware.api [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]524da66b-186e-9bff-be84-abe871ce35f7, 'name': SearchDatastore_Task, 'duration_secs': 0.011667} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1852.039074] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1852.039234] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1852.039512] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1852.039663] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1852.039845] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1852.040224] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-38855d90-8726-4895-a74f-4a479c2a08ab {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.058700] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e 
tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1852.058861] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1852.060039] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23320c2b-f0ce-4aaf-81c4-1a42b74b97d8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.066079] env[62510]: DEBUG oslo_vmware.api [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 1852.066079] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]523abf25-975e-6652-1710-221894fe11f5" [ 1852.066079] env[62510]: _type = "Task" [ 1852.066079] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1852.077125] env[62510]: DEBUG oslo_vmware.api [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]523abf25-975e-6652-1710-221894fe11f5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.164373] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da6703dc-39ea-419b-becc-d56f6cc2ef88 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.172825] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-401f5a37-43af-4b66-a1d5-c5fe84de7706 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.212195] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6186cca-c360-473a-85e0-394f7a058737 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.215333] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 3533a113-6f46-4b18-872d-9bc1b0481969] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1852.223726] env[62510]: DEBUG oslo_vmware.api [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769387, 'name': Rename_Task, 'duration_secs': 0.163164} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1852.225816] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1852.226328] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ab774569-806a-4c3d-8a41-6f16e3739056 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.228879] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-051bfd5d-4d72-4b91-825e-0dc08d5f0ebe {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.245815] env[62510]: DEBUG nova.compute.provider_tree [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1852.249375] env[62510]: DEBUG oslo_vmware.api [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1852.249375] env[62510]: value = "task-1769390" [ 1852.249375] env[62510]: _type = "Task" [ 1852.249375] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1852.260084] env[62510]: DEBUG oslo_vmware.api [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769390, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.394859] env[62510]: DEBUG oslo_concurrency.lockutils [req-9e674cde-a4d4-4a6f-adfa-9fd29edb3a8e req-71b6631f-86e2-48e0-bf82-5f8c9eea4b60 service nova] Releasing lock "refresh_cache-6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1852.471297] env[62510]: DEBUG oslo_vmware.api [None req-892a0028-30de-442b-a4f9-82c0483972c4 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769389, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.179183} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1852.471765] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-892a0028-30de-442b-a4f9-82c0483972c4 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1852.471980] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-892a0028-30de-442b-a4f9-82c0483972c4 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1852.472830] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-892a0028-30de-442b-a4f9-82c0483972c4 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1852.472830] env[62510]: INFO nova.compute.manager [None req-892a0028-30de-442b-a4f9-82c0483972c4 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1852.473058] env[62510]: DEBUG oslo.service.loopingcall [None req-892a0028-30de-442b-a4f9-82c0483972c4 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1852.473581] env[62510]: DEBUG nova.compute.manager [-] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1852.473581] env[62510]: DEBUG nova.network.neutron [-] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1852.540223] env[62510]: INFO nova.compute.manager [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Took 31.26 seconds to build instance. [ 1852.584156] env[62510]: DEBUG oslo_vmware.api [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]523abf25-975e-6652-1710-221894fe11f5, 'name': SearchDatastore_Task, 'duration_secs': 0.010723} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1852.585527] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c5963f4-6c30-42e3-86dd-0de83ef07d05 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.598049] env[62510]: DEBUG oslo_vmware.api [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 1852.598049] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]524957f3-9044-5129-6753-e22b0e310d64" [ 1852.598049] env[62510]: _type = "Task" [ 1852.598049] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1852.608330] env[62510]: DEBUG oslo_vmware.api [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]524957f3-9044-5129-6753-e22b0e310d64, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.615986] env[62510]: DEBUG nova.compute.manager [req-b6047f90-885f-4623-b1c8-ac705d6560c9 req-a5adb361-607f-4d2a-b44b-f49a3e6593e9 service nova] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Received event network-vif-plugged-68c246e2-7126-4f5b-bc52-3c63f14aacf5 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1852.615986] env[62510]: DEBUG oslo_concurrency.lockutils [req-b6047f90-885f-4623-b1c8-ac705d6560c9 req-a5adb361-607f-4d2a-b44b-f49a3e6593e9 service nova] Acquiring lock "92cb4e54-a00e-4974-b134-22d302932e32-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1852.616138] env[62510]: DEBUG oslo_concurrency.lockutils [req-b6047f90-885f-4623-b1c8-ac705d6560c9 req-a5adb361-607f-4d2a-b44b-f49a3e6593e9 service nova] Lock "92cb4e54-a00e-4974-b134-22d302932e32-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1852.616208] env[62510]: DEBUG oslo_concurrency.lockutils [req-b6047f90-885f-4623-b1c8-ac705d6560c9 req-a5adb361-607f-4d2a-b44b-f49a3e6593e9 service nova] Lock "92cb4e54-a00e-4974-b134-22d302932e32-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1852.616374] env[62510]: DEBUG nova.compute.manager [req-b6047f90-885f-4623-b1c8-ac705d6560c9 req-a5adb361-607f-4d2a-b44b-f49a3e6593e9 service nova] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] No waiting events found dispatching network-vif-plugged-68c246e2-7126-4f5b-bc52-3c63f14aacf5 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1852.616543] env[62510]: WARNING nova.compute.manager [req-b6047f90-885f-4623-b1c8-ac705d6560c9 req-a5adb361-607f-4d2a-b44b-f49a3e6593e9 service nova] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Received unexpected event 
network-vif-plugged-68c246e2-7126-4f5b-bc52-3c63f14aacf5 for instance with vm_state building and task_state spawning. [ 1852.719066] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 58e71d67-aed2-4329-ab60-4dfacff1d0a2] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1852.751332] env[62510]: DEBUG nova.scheduler.client.report [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1852.765604] env[62510]: DEBUG oslo_vmware.api [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769390, 'name': PowerOnVM_Task, 'duration_secs': 0.505756} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1852.765957] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1852.766103] env[62510]: INFO nova.compute.manager [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Took 9.01 seconds to spawn the instance on the hypervisor. 
[ 1852.766290] env[62510]: DEBUG nova.compute.manager [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1852.767362] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62cdc02f-925a-4538-83b6-5581a6334e04 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.043499] env[62510]: DEBUG oslo_concurrency.lockutils [None req-96cc7432-4fd3-45c5-a13d-f2170cb9570b tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Lock "2e24b76d-a770-4f1e-a8f1-a54417f1be81" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.772s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1853.047277] env[62510]: DEBUG nova.compute.manager [req-fe3d9883-aa90-4c76-9fd0-9a0e075abb6d req-ae44ba86-97eb-45bb-b5c5-2c105a9d4ee5 service nova] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Received event network-vif-deleted-b9d1d288-41c0-4355-a940-4e80836ad286 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1853.047451] env[62510]: INFO nova.compute.manager [req-fe3d9883-aa90-4c76-9fd0-9a0e075abb6d req-ae44ba86-97eb-45bb-b5c5-2c105a9d4ee5 service nova] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Neutron deleted interface b9d1d288-41c0-4355-a940-4e80836ad286; detaching it from the instance and deleting it from the info cache [ 1853.048102] env[62510]: DEBUG nova.network.neutron [req-fe3d9883-aa90-4c76-9fd0-9a0e075abb6d req-ae44ba86-97eb-45bb-b5c5-2c105a9d4ee5 service nova] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1853.108152] env[62510]: DEBUG oslo_vmware.api [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]524957f3-9044-5129-6753-e22b0e310d64, 'name': SearchDatastore_Task, 'duration_secs': 0.011217} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1853.108423] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1853.108704] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3/6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1853.108958] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f561ff43-21ae-4ba0-bbe5-c1db478be051 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.116619] env[62510]: DEBUG oslo_vmware.api [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 1853.116619] env[62510]: value = "task-1769391" [ 1853.116619] env[62510]: _type = "Task" [ 1853.116619] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1853.125710] env[62510]: DEBUG oslo_vmware.api [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769391, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1853.136300] env[62510]: DEBUG nova.network.neutron [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Successfully updated port: 68c246e2-7126-4f5b-bc52-3c63f14aacf5 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1853.222872] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: b004fba7-13e0-40f0-827d-8d09b7717176] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1853.259475] env[62510]: DEBUG oslo_concurrency.lockutils [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.375s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1853.261988] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4becc2e2-7669-4464-85f2-c4f31b985e15 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.847s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1853.262295] env[62510]: DEBUG nova.objects.instance [None req-4becc2e2-7669-4464-85f2-c4f31b985e15 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Lazy-loading 'resources' on Instance uuid 31772dc9-4f04-42df-9e3b-3200cc72c977 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1853.288849] env[62510]: INFO nova.compute.manager [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Took 31.06 seconds to build instance. [ 1853.461030] env[62510]: DEBUG nova.network.neutron [-] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1853.555297] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d7918463-5dda-4871-9252-23b2eba8a7c6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.567585] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65d24b8a-940c-4e9b-9f67-8973dcdee3a2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.613196] env[62510]: DEBUG nova.compute.manager [req-fe3d9883-aa90-4c76-9fd0-9a0e075abb6d req-ae44ba86-97eb-45bb-b5c5-2c105a9d4ee5 service nova] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Detach interface failed, port_id=b9d1d288-41c0-4355-a940-4e80836ad286, reason: Instance e7e053be-cb88-4ae0-b157-3006211f77d9 could not be found. 
{{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1853.626589] env[62510]: DEBUG oslo_vmware.api [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769391, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1853.642633] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Acquiring lock "refresh_cache-92cb4e54-a00e-4974-b134-22d302932e32" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1853.642789] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Acquired lock "refresh_cache-92cb4e54-a00e-4974-b134-22d302932e32" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1853.642944] env[62510]: DEBUG nova.network.neutron [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1853.702252] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9fa74d3f-6705-44f3-b1fa-06ab045c682c tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Acquiring lock "2e24b76d-a770-4f1e-a8f1-a54417f1be81" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.705018] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9fa74d3f-6705-44f3-b1fa-06ab045c682c tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Lock "2e24b76d-a770-4f1e-a8f1-a54417f1be81" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1853.705018] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9fa74d3f-6705-44f3-b1fa-06ab045c682c tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Acquiring lock "2e24b76d-a770-4f1e-a8f1-a54417f1be81-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.705018] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9fa74d3f-6705-44f3-b1fa-06ab045c682c tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Lock "2e24b76d-a770-4f1e-a8f1-a54417f1be81-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1853.705018] env[62510]: DEBUG 
oslo_concurrency.lockutils [None req-9fa74d3f-6705-44f3-b1fa-06ab045c682c tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Lock "2e24b76d-a770-4f1e-a8f1-a54417f1be81-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1853.705804] env[62510]: INFO nova.compute.manager [None req-9fa74d3f-6705-44f3-b1fa-06ab045c682c tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Terminating instance [ 1853.729025] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 4f9bfb02-8aea-45a9-85ea-97e70f0d41fb] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1853.735924] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3ece853b-718f-49f0-9a68-1ffc040bcbd1 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "841460b0-d917-44ea-88c6-0e5a3022f658" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.736223] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3ece853b-718f-49f0-9a68-1ffc040bcbd1 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "841460b0-d917-44ea-88c6-0e5a3022f658" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1853.736430] env[62510]: DEBUG nova.compute.manager [None req-3ece853b-718f-49f0-9a68-1ffc040bcbd1 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1853.737742] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9723e10b-afa9-4dac-a641-740f0afd401d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.748049] env[62510]: DEBUG nova.compute.manager [None req-3ece853b-718f-49f0-9a68-1ffc040bcbd1 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62510) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1853.748704] env[62510]: DEBUG nova.objects.instance [None req-3ece853b-718f-49f0-9a68-1ffc040bcbd1 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lazy-loading 'flavor' on Instance uuid 841460b0-d917-44ea-88c6-0e5a3022f658 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1853.774289] env[62510]: DEBUG oslo_concurrency.lockutils [None req-708d83bc-8f7e-4cb7-b452-b3fed0081646 tempest-DeleteServersTestJSON-1994223681 
tempest-DeleteServersTestJSON-1994223681-project-member] Lock "313f7916-0737-4e44-ae2f-58301934bf06" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 33.228s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1853.774289] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f472b493-89ac-4a3a-8cb7-b7709651763a tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "313f7916-0737-4e44-ae2f-58301934bf06" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 11.459s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1853.774289] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f472b493-89ac-4a3a-8cb7-b7709651763a tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "313f7916-0737-4e44-ae2f-58301934bf06-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.774289] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f472b493-89ac-4a3a-8cb7-b7709651763a tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "313f7916-0737-4e44-ae2f-58301934bf06-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1853.774289] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f472b493-89ac-4a3a-8cb7-b7709651763a tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "313f7916-0737-4e44-ae2f-58301934bf06-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1853.776857] env[62510]: INFO nova.compute.manager [None req-f472b493-89ac-4a3a-8cb7-b7709651763a tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Terminating instance [ 1853.792421] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e1e8d7dc-4563-4fe8-b8ac-cf80b4284d8c tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lock "5f229f78-6c5d-4170-bdd4-c5522b137949" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.564s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1853.963969] env[62510]: INFO nova.compute.manager [-] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Took 1.49 seconds to deallocate network for instance. 
[ 1854.053125] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ec36519-a12d-4718-875c-570ef5c1e402 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.065029] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dc144c2-57df-4eda-b811-089ddfcd5555 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.103778] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f87a8651-b58a-4460-8c89-e2024f0d6e7a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.113316] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d4fc6c2-abd3-481b-b240-9a6a128df0fd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.135127] env[62510]: DEBUG oslo_vmware.api [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769391, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.942263} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1854.142972] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3/6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1854.143241] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1854.143725] env[62510]: DEBUG nova.compute.provider_tree [None req-4becc2e2-7669-4464-85f2-c4f31b985e15 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1854.145121] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f20a3117-810c-4994-a789-8bf14477eb0e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.156736] env[62510]: DEBUG oslo_vmware.api [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 1854.156736] env[62510]: value = "task-1769392" [ 1854.156736] env[62510]: _type = "Task" [ 1854.156736] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1854.169373] env[62510]: DEBUG oslo_vmware.api [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769392, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.212953] env[62510]: DEBUG nova.network.neutron [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1854.217173] env[62510]: DEBUG nova.compute.manager [None req-9fa74d3f-6705-44f3-b1fa-06ab045c682c tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1854.217548] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9fa74d3f-6705-44f3-b1fa-06ab045c682c tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1854.218583] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d544c79-5a90-4cf9-8e52-4226ae1b5c1f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.230498] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 0029d975-bd48-4558-9f41-a0cf91336393] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1854.232677] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fa74d3f-6705-44f3-b1fa-06ab045c682c tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1854.233825] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c0d312bd-8a9b-4dfc-a23e-f22523e3dae8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.243467] env[62510]: DEBUG oslo_vmware.api [None req-9fa74d3f-6705-44f3-b1fa-06ab045c682c tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for the task: (returnval){ [ 1854.243467] env[62510]: value = "task-1769393" [ 1854.243467] env[62510]: _type = "Task" [ 1854.243467] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1854.261554] env[62510]: DEBUG oslo_vmware.api [None req-9fa74d3f-6705-44f3-b1fa-06ab045c682c tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769393, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.281360] env[62510]: DEBUG nova.compute.manager [None req-f472b493-89ac-4a3a-8cb7-b7709651763a tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1854.281637] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f472b493-89ac-4a3a-8cb7-b7709651763a tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1854.282311] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5cda9a50-b7e0-41e3-91de-b6d30366a1af {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.294519] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dc3e2e1-e65b-4a17-9596-242bdc4aeded {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.340283] env[62510]: WARNING nova.virt.vmwareapi.vmops [None req-f472b493-89ac-4a3a-8cb7-b7709651763a tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 313f7916-0737-4e44-ae2f-58301934bf06 could not be found. [ 1854.340510] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f472b493-89ac-4a3a-8cb7-b7709651763a tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1854.340701] env[62510]: INFO nova.compute.manager [None req-f472b493-89ac-4a3a-8cb7-b7709651763a tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Took 0.06 seconds to destroy the instance on the hypervisor. [ 1854.342569] env[62510]: DEBUG oslo.service.loopingcall [None req-f472b493-89ac-4a3a-8cb7-b7709651763a tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1854.342569] env[62510]: DEBUG nova.compute.manager [-] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1854.342569] env[62510]: DEBUG nova.network.neutron [-] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1854.476588] env[62510]: DEBUG oslo_concurrency.lockutils [None req-892a0028-30de-442b-a4f9-82c0483972c4 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1854.611837] env[62510]: DEBUG nova.network.neutron [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Updating instance_info_cache with network_info: [{"id": "68c246e2-7126-4f5b-bc52-3c63f14aacf5", "address": "fa:16:3e:05:6e:0f", "network": {"id": "2193bc16-0e54-4910-9194-2724652b0e5d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1870939634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "59300e0f20144d9f88b78f7c971e86c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbdab640-5fea-4254-8bd3-f855b7eaca0d", "external-id": "nsx-vlan-transportzone-615", "segmentation_id": 615, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68c246e2-71", "ovs_interfaceid": "68c246e2-7126-4f5b-bc52-3c63f14aacf5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1854.621472] env[62510]: DEBUG nova.compute.manager [req-44f1daf4-2c00-4cce-abb7-d8902451a255 req-f2f344c0-2aa6-4da4-840e-7b5136322d76 service nova] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Received event network-changed-4d5c92f8-54e7-4731-bc8e-a3598f21a0b6 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1854.621472] env[62510]: DEBUG nova.compute.manager [req-44f1daf4-2c00-4cce-abb7-d8902451a255 req-f2f344c0-2aa6-4da4-840e-7b5136322d76 service nova] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Refreshing instance network info cache due to event network-changed-4d5c92f8-54e7-4731-bc8e-a3598f21a0b6. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1854.621472] env[62510]: DEBUG oslo_concurrency.lockutils [req-44f1daf4-2c00-4cce-abb7-d8902451a255 req-f2f344c0-2aa6-4da4-840e-7b5136322d76 service nova] Acquiring lock "refresh_cache-5f229f78-6c5d-4170-bdd4-c5522b137949" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1854.621472] env[62510]: DEBUG oslo_concurrency.lockutils [req-44f1daf4-2c00-4cce-abb7-d8902451a255 req-f2f344c0-2aa6-4da4-840e-7b5136322d76 service nova] Acquired lock "refresh_cache-5f229f78-6c5d-4170-bdd4-c5522b137949" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1854.621915] env[62510]: DEBUG nova.network.neutron [req-44f1daf4-2c00-4cce-abb7-d8902451a255 req-f2f344c0-2aa6-4da4-840e-7b5136322d76 service nova] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Refreshing network info cache for port 4d5c92f8-54e7-4731-bc8e-a3598f21a0b6 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1854.651451] env[62510]: DEBUG nova.scheduler.client.report [None req-4becc2e2-7669-4464-85f2-c4f31b985e15 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1854.672546] env[62510]: DEBUG oslo_vmware.api [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769392, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091152} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1854.673091] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1854.673992] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f704e814-6cb8-4150-a563-cc0dda4d5c63 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.702744] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3/6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1854.703555] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-67f9ef40-ea9e-476c-8f6e-e2011cc32751 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.729018] env[62510]: DEBUG oslo_vmware.api [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 1854.729018] env[62510]: value = "task-1769394" [ 1854.729018] env[62510]: _type = "Task" [ 1854.729018] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1854.733855] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: b7c2c768-573b-4c1c-ade7-45fb87b95d41] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1854.739361] env[62510]: DEBUG oslo_vmware.api [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769394, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.754275] env[62510]: DEBUG oslo_vmware.api [None req-9fa74d3f-6705-44f3-b1fa-06ab045c682c tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769393, 'name': PowerOffVM_Task, 'duration_secs': 0.249009} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1854.754811] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fa74d3f-6705-44f3-b1fa-06ab045c682c tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1854.755014] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9fa74d3f-6705-44f3-b1fa-06ab045c682c tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1854.755430] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b370eae0-4c69-4150-b161-5205b725fff1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.759160] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ece853b-718f-49f0-9a68-1ffc040bcbd1 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1854.759957] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f95e9c6f-886a-472a-87f3-6dbd13c036de {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.767727] env[62510]: DEBUG oslo_vmware.api [None req-3ece853b-718f-49f0-9a68-1ffc040bcbd1 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 1854.767727] env[62510]: value = "task-1769395" [ 1854.767727] env[62510]: _type = "Task" [ 1854.767727] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1854.781168] env[62510]: DEBUG oslo_vmware.api [None req-3ece853b-718f-49f0-9a68-1ffc040bcbd1 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769395, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.024191] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9fa74d3f-6705-44f3-b1fa-06ab045c682c tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1855.024191] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9fa74d3f-6705-44f3-b1fa-06ab045c682c tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1855.024191] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fa74d3f-6705-44f3-b1fa-06ab045c682c tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Deleting the datastore file [datastore1] 2e24b76d-a770-4f1e-a8f1-a54417f1be81 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1855.024191] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dcd20244-4a68-4096-92f3-e2746541193c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.036103] env[62510]: DEBUG oslo_vmware.api [None req-9fa74d3f-6705-44f3-b1fa-06ab045c682c tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for the task: (returnval){ [ 1855.036103] env[62510]: value = "task-1769397" [ 1855.036103] env[62510]: _type = "Task" [ 1855.036103] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.045237] env[62510]: DEBUG oslo_vmware.api [None req-9fa74d3f-6705-44f3-b1fa-06ab045c682c tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769397, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.092695] env[62510]: DEBUG nova.compute.manager [req-dce65954-2f71-4f6a-951a-1e27d4043329 req-dbdf3c31-8502-4b80-83df-752cc49e6733 service nova] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Received event network-changed-68c246e2-7126-4f5b-bc52-3c63f14aacf5 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1855.092695] env[62510]: DEBUG nova.compute.manager [req-dce65954-2f71-4f6a-951a-1e27d4043329 req-dbdf3c31-8502-4b80-83df-752cc49e6733 service nova] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Refreshing instance network info cache due to event network-changed-68c246e2-7126-4f5b-bc52-3c63f14aacf5. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1855.092695] env[62510]: DEBUG oslo_concurrency.lockutils [req-dce65954-2f71-4f6a-951a-1e27d4043329 req-dbdf3c31-8502-4b80-83df-752cc49e6733 service nova] Acquiring lock "refresh_cache-92cb4e54-a00e-4974-b134-22d302932e32" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1855.115377] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Releasing lock "refresh_cache-92cb4e54-a00e-4974-b134-22d302932e32" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1855.115748] env[62510]: DEBUG nova.compute.manager [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Instance network_info: |[{"id": "68c246e2-7126-4f5b-bc52-3c63f14aacf5", "address": "fa:16:3e:05:6e:0f", "network": {"id": "2193bc16-0e54-4910-9194-2724652b0e5d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1870939634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "59300e0f20144d9f88b78f7c971e86c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbdab640-5fea-4254-8bd3-f855b7eaca0d", "external-id": "nsx-vlan-transportzone-615", "segmentation_id": 615, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68c246e2-71", "ovs_interfaceid": "68c246e2-7126-4f5b-bc52-3c63f14aacf5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1855.116273] env[62510]: DEBUG oslo_concurrency.lockutils [req-dce65954-2f71-4f6a-951a-1e27d4043329 req-dbdf3c31-8502-4b80-83df-752cc49e6733 service nova] Acquired lock "refresh_cache-92cb4e54-a00e-4974-b134-22d302932e32" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1855.116573] env[62510]: DEBUG nova.network.neutron [req-dce65954-2f71-4f6a-951a-1e27d4043329 req-dbdf3c31-8502-4b80-83df-752cc49e6733 service nova] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Refreshing network info cache for port 68c246e2-7126-4f5b-bc52-3c63f14aacf5 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1855.117609] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:05:6e:0f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dbdab640-5fea-4254-8bd3-f855b7eaca0d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '68c246e2-7126-4f5b-bc52-3c63f14aacf5', 
'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1855.127243] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Creating folder: Project (59300e0f20144d9f88b78f7c971e86c9). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1855.133026] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-96393673-7fc1-4f60-a384-ef3605e572de {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.144827] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Created folder: Project (59300e0f20144d9f88b78f7c971e86c9) in parent group-v367197. [ 1855.145052] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Creating folder: Instances. Parent ref: group-v367459. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1855.145296] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d528e960-f64b-4209-b3a7-cfcfccb2eac4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.158226] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4becc2e2-7669-4464-85f2-c4f31b985e15 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.896s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1855.160280] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Created folder: Instances in parent group-v367459. [ 1855.161995] env[62510]: DEBUG oslo.service.loopingcall [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1855.161995] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.785s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1855.163843] env[62510]: INFO nova.compute.claims [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1855.165494] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1855.166458] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fdad0599-a37a-4cfe-9963-497702eac4b0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.188332] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1855.188332] env[62510]: value = "task-1769400" [ 1855.188332] env[62510]: _type = "Task" [ 1855.188332] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.197600] env[62510]: INFO nova.scheduler.client.report [None req-4becc2e2-7669-4464-85f2-c4f31b985e15 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Deleted allocations for instance 31772dc9-4f04-42df-9e3b-3200cc72c977 [ 1855.202439] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769400, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.243937] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 7cc6d4a6-2765-44e7-b378-e213a562593d] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1855.246254] env[62510]: DEBUG oslo_vmware.api [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769394, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.283328] env[62510]: DEBUG oslo_vmware.api [None req-3ece853b-718f-49f0-9a68-1ffc040bcbd1 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769395, 'name': PowerOffVM_Task, 'duration_secs': 0.254595} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1855.283420] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ece853b-718f-49f0-9a68-1ffc040bcbd1 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1855.283652] env[62510]: DEBUG nova.compute.manager [None req-3ece853b-718f-49f0-9a68-1ffc040bcbd1 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1855.285492] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e320e62f-ac46-41db-b71f-2f2ea2cfe5b4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.355502] env[62510]: DEBUG nova.network.neutron [-] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1855.549067] env[62510]: DEBUG oslo_vmware.api [None req-9fa74d3f-6705-44f3-b1fa-06ab045c682c tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Task: {'id': task-1769397, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.442306} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1855.549498] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fa74d3f-6705-44f3-b1fa-06ab045c682c tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1855.549654] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9fa74d3f-6705-44f3-b1fa-06ab045c682c tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1855.550160] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9fa74d3f-6705-44f3-b1fa-06ab045c682c tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1855.550160] env[62510]: INFO nova.compute.manager [None req-9fa74d3f-6705-44f3-b1fa-06ab045c682c tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Took 1.33 seconds to destroy the instance on the hypervisor. [ 1855.550551] env[62510]: DEBUG oslo.service.loopingcall [None req-9fa74d3f-6705-44f3-b1fa-06ab045c682c tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1855.550657] env[62510]: DEBUG nova.compute.manager [-] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1855.550827] env[62510]: DEBUG nova.network.neutron [-] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1855.703149] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769400, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.712484] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4becc2e2-7669-4464-85f2-c4f31b985e15 tempest-ServerShowV247Test-1219505604 tempest-ServerShowV247Test-1219505604-project-member] Lock "31772dc9-4f04-42df-9e3b-3200cc72c977" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.594s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1855.743084] env[62510]: DEBUG oslo_vmware.api [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769394, 'name': ReconfigVM_Task, 'duration_secs': 0.894851} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1855.743781] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Reconfigured VM instance instance-00000062 to attach disk [datastore1] 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3/6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1855.744183] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f04c7e77-5799-434e-8d3e-e0d4e2a47a6b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.750834] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: e638d2d6-2a8e-4f69-b1d7-fef7ca4408c7] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1855.755897] env[62510]: DEBUG oslo_vmware.api [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 1855.755897] env[62510]: value = "task-1769401" [ 1855.755897] env[62510]: _type = "Task" [ 1855.755897] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.765483] env[62510]: DEBUG oslo_vmware.api [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769401, 'name': Rename_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.780609] env[62510]: DEBUG nova.network.neutron [req-44f1daf4-2c00-4cce-abb7-d8902451a255 req-f2f344c0-2aa6-4da4-840e-7b5136322d76 service nova] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Updated VIF entry in instance network info cache for port 4d5c92f8-54e7-4731-bc8e-a3598f21a0b6. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1855.781370] env[62510]: DEBUG nova.network.neutron [req-44f1daf4-2c00-4cce-abb7-d8902451a255 req-f2f344c0-2aa6-4da4-840e-7b5136322d76 service nova] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Updating instance_info_cache with network_info: [{"id": "4d5c92f8-54e7-4731-bc8e-a3598f21a0b6", "address": "fa:16:3e:ff:14:88", "network": {"id": "3b8d6085-89b4-4ce1-b2d3-a23177f0eb79", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-951886226-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de0d125bba6242d3b9614402098efc1f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d5c92f8-54", "ovs_interfaceid": "4d5c92f8-54e7-4731-bc8e-a3598f21a0b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1855.794667] env[62510]: DEBUG nova.network.neutron [req-dce65954-2f71-4f6a-951a-1e27d4043329 req-dbdf3c31-8502-4b80-83df-752cc49e6733 service nova] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Updated VIF entry in instance network info cache for port 68c246e2-7126-4f5b-bc52-3c63f14aacf5. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1855.795929] env[62510]: DEBUG nova.network.neutron [req-dce65954-2f71-4f6a-951a-1e27d4043329 req-dbdf3c31-8502-4b80-83df-752cc49e6733 service nova] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Updating instance_info_cache with network_info: [{"id": "68c246e2-7126-4f5b-bc52-3c63f14aacf5", "address": "fa:16:3e:05:6e:0f", "network": {"id": "2193bc16-0e54-4910-9194-2724652b0e5d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1870939634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "59300e0f20144d9f88b78f7c971e86c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbdab640-5fea-4254-8bd3-f855b7eaca0d", "external-id": "nsx-vlan-transportzone-615", "segmentation_id": 615, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68c246e2-71", "ovs_interfaceid": "68c246e2-7126-4f5b-bc52-3c63f14aacf5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1855.800353] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3ece853b-718f-49f0-9a68-1ffc040bcbd1 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "841460b0-d917-44ea-88c6-0e5a3022f658" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.064s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1855.857992] env[62510]: INFO nova.compute.manager [-] [instance: 313f7916-0737-4e44-ae2f-58301934bf06] Took 1.52 seconds to deallocate network for instance. [ 1856.212020] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769400, 'name': CreateVM_Task, 'duration_secs': 0.560669} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.212020] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1856.212020] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1856.212020] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1856.213894] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1856.214196] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9aee7e50-963e-4a8d-8147-9af5ca287a82 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.230037] env[62510]: DEBUG oslo_vmware.api [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Waiting for the task: (returnval){ [ 1856.230037] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52627992-8a9a-e9d3-1017-3100546bd275" [ 1856.230037] env[62510]: _type = "Task" [ 1856.230037] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.248423] env[62510]: DEBUG oslo_vmware.api [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52627992-8a9a-e9d3-1017-3100546bd275, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.259388] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 8bbafd7f-cdd1-4246-a509-2f97a6f78497] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1856.271076] env[62510]: DEBUG oslo_vmware.api [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769401, 'name': Rename_Task, 'duration_secs': 0.442495} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.271397] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1856.271640] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-78f596b7-37af-4c41-ab46-9a0daa387282 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.282362] env[62510]: DEBUG oslo_vmware.api [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 1856.282362] env[62510]: value = "task-1769402" [ 1856.282362] env[62510]: _type = "Task" [ 1856.282362] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.286330] env[62510]: DEBUG oslo_concurrency.lockutils [req-44f1daf4-2c00-4cce-abb7-d8902451a255 req-f2f344c0-2aa6-4da4-840e-7b5136322d76 service nova] Releasing lock "refresh_cache-5f229f78-6c5d-4170-bdd4-c5522b137949" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1856.292439] env[62510]: DEBUG oslo_vmware.api [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769402, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.301120] env[62510]: DEBUG oslo_concurrency.lockutils [req-dce65954-2f71-4f6a-951a-1e27d4043329 req-dbdf3c31-8502-4b80-83df-752cc49e6733 service nova] Releasing lock "refresh_cache-92cb4e54-a00e-4974-b134-22d302932e32" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1856.399580] env[62510]: DEBUG nova.network.neutron [-] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1856.547842] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43dd804c-7de0-40fa-a781-ad18acb1b17f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.558562] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-751d9d73-3063-4e37-9a2a-35321e5311b3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.595952] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bb3a20b-470a-47b8-a423-03489450ba36 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.604423] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52c9f345-2d0d-4fa4-8d26-ab29979bcf9f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.620732] env[62510]: DEBUG nova.compute.provider_tree [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1856.660146] env[62510]: DEBUG nova.compute.manager [req-2f277293-7cf8-48f9-b683-ac9c830d7b9c req-3af62c4d-02c5-478c-9951-4fc9f7752143 service nova] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Received event network-vif-deleted-965c7e31-fbcc-4660-900b-d657b5aa8abb {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1856.744359] env[62510]: DEBUG oslo_vmware.api [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52627992-8a9a-e9d3-1017-3100546bd275, 'name': SearchDatastore_Task, 'duration_secs': 0.025236} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.744683] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1856.744922] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1856.745205] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1856.745389] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1856.745764] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1856.745855] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9de0aa11-0302-4c10-aa4a-12e765912fbe {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.757297] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1856.757297] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1856.757297] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e3be13d-5ff3-4320-bbda-d0fe064dd069 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.762785] env[62510]: DEBUG oslo_vmware.api [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Waiting for the task: (returnval){ [ 1856.762785] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52b63655-c02e-a6db-e7f3-9473b4320da4" [ 1856.762785] env[62510]: _type = "Task" [ 1856.762785] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.767523] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 34a464e2-d38e-4c24-a487-c62a4f484667] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1856.778519] env[62510]: DEBUG oslo_vmware.api [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52b63655-c02e-a6db-e7f3-9473b4320da4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.796341] env[62510]: DEBUG oslo_vmware.api [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769402, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.900926] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f472b493-89ac-4a3a-8cb7-b7709651763a tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "313f7916-0737-4e44-ae2f-58301934bf06" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.128s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1856.908344] env[62510]: INFO nova.compute.manager [-] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Took 1.36 seconds to deallocate network for instance. 
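The PowerOnVM_Task and SearchDatastore_Task lines above are produced by oslo.vmware's task polling: wait_for_task hands the vCenter task over to _poll_task, which logs "progress is N%" until the task reaches a terminal state and then logs the duration. The snippet below is a minimal, dependency-free sketch of that poll-until-done pattern; FakeTask, the hard-coded progress steps, and the print format are illustrative stand-ins, not the actual oslo_vmware.api implementation.

    import time

    class FakeTask:
        """Stand-in for a vCenter task handle (illustrative only)."""
        def __init__(self, name, steps=(0, 51, 88, 100)):
            self.name = name
            self._steps = list(steps)
            self.started = time.monotonic()

        def poll(self):
            # Each poll returns the next progress value until the task finishes.
            progress = self._steps.pop(0) if self._steps else 100
            state = 'success' if progress >= 100 else 'running'
            return state, progress

    def wait_for_task(task, poll_interval=0.5):
        """Poll a task until it completes, mimicking the log lines above."""
        while True:
            state, progress = task.poll()
            if state == 'running':
                print("Task: {'id': %s, 'name': %s} progress is %d%%."
                      % (id(task), task.name, progress))
                time.sleep(poll_interval)
                continue
            duration = time.monotonic() - task.started
            print("Task: {'id': %s, 'name': %s, 'duration_secs': %.6f} "
                  "completed successfully." % (id(task), task.name, duration))
            return

    if __name__ == '__main__':
        wait_for_task(FakeTask('PowerOnVM_Task'))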
[ 1856.942021] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquiring lock "5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1856.942021] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Lock "5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1856.985396] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquiring lock "82dceacf-1898-4d86-b1c6-552a24ab565f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1856.985678] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Lock "82dceacf-1898-4d86-b1c6-552a24ab565f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1857.126225] env[62510]: DEBUG nova.scheduler.client.report [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1857.275650] env[62510]: DEBUG oslo_vmware.api [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52b63655-c02e-a6db-e7f3-9473b4320da4, 'name': SearchDatastore_Task, 'duration_secs': 0.015372} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1857.277510] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35f7edb5-ebe4-4fa3-a64f-b0c449518888 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.280293] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 612e95d6-28ef-4c9a-b5d9-fd83122bfa44] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1857.293060] env[62510]: DEBUG oslo_vmware.api [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Waiting for the task: (returnval){ [ 1857.293060] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52025076-483e-18fc-9fa9-9755616706a4" [ 1857.293060] env[62510]: _type = "Task" [ 1857.293060] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1857.305481] env[62510]: DEBUG oslo_vmware.api [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769402, 'name': PowerOnVM_Task, 'duration_secs': 0.754558} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1857.308288] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1857.308288] env[62510]: INFO nova.compute.manager [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Took 10.66 seconds to spawn the instance on the hypervisor. [ 1857.308288] env[62510]: DEBUG nova.compute.manager [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1857.308288] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b4b48d0-f846-43df-808a-3b0f5c81fa46 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.317271] env[62510]: DEBUG oslo_vmware.api [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52025076-483e-18fc-9fa9-9755616706a4, 'name': SearchDatastore_Task, 'duration_secs': 0.016259} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1857.317951] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1857.318229] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 92cb4e54-a00e-4974-b134-22d302932e32/92cb4e54-a00e-4974-b134-22d302932e32.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1857.318493] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fc22dd86-2ae1-416e-9029-7518f46d620b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.331386] env[62510]: DEBUG oslo_vmware.api [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Waiting for the task: (returnval){ [ 1857.331386] env[62510]: value = "task-1769403" [ 1857.331386] env[62510]: _type = "Task" [ 1857.331386] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1857.340690] env[62510]: DEBUG oslo_vmware.api [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769403, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.415431] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9fa74d3f-6705-44f3-b1fa-06ab045c682c tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1857.442758] env[62510]: DEBUG nova.compute.manager [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1857.474009] env[62510]: DEBUG nova.compute.manager [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Stashing vm_state: stopped {{(pid=62510) _prep_resize /opt/stack/nova/nova/compute/manager.py:5998}} [ 1857.489738] env[62510]: DEBUG nova.compute.manager [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1857.634728] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.473s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1857.635292] env[62510]: DEBUG nova.compute.manager [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1857.641549] env[62510]: DEBUG oslo_concurrency.lockutils [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 7.314s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1857.641729] env[62510]: DEBUG nova.objects.instance [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62510) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1857.786037] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 12768001-6ed0-47be-8f20-c59ee82b842a] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 1857.846041] env[62510]: INFO nova.compute.manager [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Took 34.97 seconds to build instance. [ 1857.863128] env[62510]: DEBUG oslo_vmware.api [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769403, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.967497] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1857.998246] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1858.017304] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1858.146215] env[62510]: DEBUG nova.compute.utils [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1858.162043] env[62510]: DEBUG nova.compute.manager [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1858.162043] env[62510]: DEBUG nova.network.neutron [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1858.262445] env[62510]: DEBUG nova.policy [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '112bb5174a71476f9aaa66e917fc135a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cca414b18f8d431786c155d359f1325d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1858.278338] env[62510]: DEBUG oslo_concurrency.lockutils [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "0c93a909-d08f-466c-bdef-a26fa35cd944" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1858.278695] env[62510]: DEBUG oslo_concurrency.lockutils [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "0c93a909-d08f-466c-bdef-a26fa35cd944" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1858.290048] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1858.290204] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Cleaning up deleted instances with incomplete migration {{(pid=62510) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11647}} [ 1858.347998] env[62510]: DEBUG oslo_vmware.api [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769403, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.71303} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1858.348353] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 92cb4e54-a00e-4974-b134-22d302932e32/92cb4e54-a00e-4974-b134-22d302932e32.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1858.348580] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1858.348835] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-aa2c570c-3cdc-460f-a079-bb82215c6d57 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.352443] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2ff31e95-7b5d-4178-a27a-7c37300cea2e tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.492s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1858.357037] env[62510]: DEBUG oslo_vmware.api [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Waiting for the task: (returnval){ [ 1858.357037] env[62510]: value = "task-1769404" [ 1858.357037] env[62510]: _type = "Task" [ 1858.357037] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1858.365933] env[62510]: DEBUG oslo_vmware.api [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769404, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.664845] env[62510]: DEBUG nova.compute.manager [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Start building block device mappings for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1858.671683] env[62510]: DEBUG oslo_concurrency.lockutils [None req-db89f69b-86e3-488b-88af-de1c3545c2ab tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.030s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1858.673615] env[62510]: DEBUG oslo_concurrency.lockutils [None req-892a0028-30de-442b-a4f9-82c0483972c4 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.197s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1858.674083] env[62510]: DEBUG nova.objects.instance [None req-892a0028-30de-442b-a4f9-82c0483972c4 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lazy-loading 'resources' on Instance uuid e7e053be-cb88-4ae0-b157-3006211f77d9 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1858.784194] env[62510]: DEBUG nova.compute.manager [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1858.793369] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1858.862154] env[62510]: DEBUG nova.network.neutron [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Successfully created port: f1d12594-5d5a-4965-a017-3b055a432283 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1858.878141] env[62510]: DEBUG oslo_vmware.api [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769404, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.213537} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1858.878141] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1858.878141] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c0bbb3d-c37e-402f-ba3e-79dcea7f6f57 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.905120] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] 92cb4e54-a00e-4974-b134-22d302932e32/92cb4e54-a00e-4974-b134-22d302932e32.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1858.905578] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e98bfefc-750c-4636-aba8-cf75741128a2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.929028] env[62510]: DEBUG oslo_vmware.api [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Waiting for the task: (returnval){ [ 1858.929028] env[62510]: value = "task-1769405" [ 1858.929028] env[62510]: _type = "Task" [ 1858.929028] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1858.940848] env[62510]: DEBUG oslo_vmware.api [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769405, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.947818] env[62510]: DEBUG nova.compute.manager [req-24e6eeb2-0a9b-40be-9de6-1d6e11df5320 req-376cb658-b31e-4e46-883c-e41d036ae4e3 service nova] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Received event network-changed-f5b21632-114e-43ff-8c8e-a6ff44e674eb {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1858.948053] env[62510]: DEBUG nova.compute.manager [req-24e6eeb2-0a9b-40be-9de6-1d6e11df5320 req-376cb658-b31e-4e46-883c-e41d036ae4e3 service nova] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Refreshing instance network info cache due to event network-changed-f5b21632-114e-43ff-8c8e-a6ff44e674eb. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1858.948311] env[62510]: DEBUG oslo_concurrency.lockutils [req-24e6eeb2-0a9b-40be-9de6-1d6e11df5320 req-376cb658-b31e-4e46-883c-e41d036ae4e3 service nova] Acquiring lock "refresh_cache-6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1858.948485] env[62510]: DEBUG oslo_concurrency.lockutils [req-24e6eeb2-0a9b-40be-9de6-1d6e11df5320 req-376cb658-b31e-4e46-883c-e41d036ae4e3 service nova] Acquired lock "refresh_cache-6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1858.948688] env[62510]: DEBUG nova.network.neutron [req-24e6eeb2-0a9b-40be-9de6-1d6e11df5320 req-376cb658-b31e-4e46-883c-e41d036ae4e3 service nova] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Refreshing network info cache for port f5b21632-114e-43ff-8c8e-a6ff44e674eb {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1859.310818] env[62510]: DEBUG oslo_concurrency.lockutils [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1859.439830] env[62510]: DEBUG oslo_vmware.api [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769405, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1859.479280] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7fd90e9-0fa0-4341-813d-38d99607c306 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.487333] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbf1ffc0-923e-46f0-8268-ec74da58414e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.521337] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89ca019e-4897-4b99-8d9b-17ed66ac195e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.532201] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95d89071-cffd-4a38-8a5b-5c29345a213e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.550438] env[62510]: DEBUG nova.compute.provider_tree [None req-892a0028-30de-442b-a4f9-82c0483972c4 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1859.682419] env[62510]: DEBUG nova.compute.manager [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 
0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1859.707660] env[62510]: DEBUG nova.virt.hardware [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1859.707970] env[62510]: DEBUG nova.virt.hardware [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1859.708155] env[62510]: DEBUG nova.virt.hardware [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1859.708352] env[62510]: DEBUG nova.virt.hardware [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1859.708506] env[62510]: DEBUG nova.virt.hardware [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1859.708659] env[62510]: DEBUG nova.virt.hardware [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1859.708891] env[62510]: DEBUG nova.virt.hardware [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1859.709079] env[62510]: DEBUG nova.virt.hardware [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1859.709261] env[62510]: DEBUG nova.virt.hardware [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1859.709420] env[62510]: DEBUG nova.virt.hardware [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1859.709591] env[62510]: DEBUG nova.virt.hardware [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1859.710497] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2c2e897-58a7-4385-a80b-119458e10d48 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.722020] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7f7717e-cee6-4185-8491-f2ee0f2910c2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.813991] env[62510]: DEBUG nova.network.neutron [req-24e6eeb2-0a9b-40be-9de6-1d6e11df5320 req-376cb658-b31e-4e46-883c-e41d036ae4e3 service nova] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Updated VIF entry in instance network info cache for port f5b21632-114e-43ff-8c8e-a6ff44e674eb. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1859.814297] env[62510]: DEBUG nova.network.neutron [req-24e6eeb2-0a9b-40be-9de6-1d6e11df5320 req-376cb658-b31e-4e46-883c-e41d036ae4e3 service nova] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Updating instance_info_cache with network_info: [{"id": "f5b21632-114e-43ff-8c8e-a6ff44e674eb", "address": "fa:16:3e:2a:d0:51", "network": {"id": "e420cc26-6a46-4189-b24c-78c39b6b4d50", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-234097015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.129", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11c021c6b45c452f83732fe578e576f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6eb7e3e9-5cc2-40f1-a6eb-f70f06531667", "external-id": "nsx-vlan-transportzone-938", "segmentation_id": 938, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5b21632-11", "ovs_interfaceid": "f5b21632-114e-43ff-8c8e-a6ff44e674eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1859.943568] env[62510]: DEBUG oslo_vmware.api [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769405, 'name': ReconfigVM_Task, 'duration_secs': 0.957617} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1859.943568] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Reconfigured VM instance instance-00000063 to attach disk [datastore1] 92cb4e54-a00e-4974-b134-22d302932e32/92cb4e54-a00e-4974-b134-22d302932e32.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1859.944255] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-af721475-fb8c-4830-98d2-aff6712a8255 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.952138] env[62510]: DEBUG oslo_vmware.api [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Waiting for the task: (returnval){ [ 1859.952138] env[62510]: value = "task-1769406" [ 1859.952138] env[62510]: _type = "Task" [ 1859.952138] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1859.963138] env[62510]: DEBUG oslo_vmware.api [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769406, 'name': Rename_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.053987] env[62510]: DEBUG nova.scheduler.client.report [None req-892a0028-30de-442b-a4f9-82c0483972c4 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1860.320644] env[62510]: DEBUG oslo_concurrency.lockutils [req-24e6eeb2-0a9b-40be-9de6-1d6e11df5320 req-376cb658-b31e-4e46-883c-e41d036ae4e3 service nova] Releasing lock "refresh_cache-6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1860.463843] env[62510]: DEBUG oslo_vmware.api [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769406, 'name': Rename_Task, 'duration_secs': 0.206735} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1860.464308] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1860.464588] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ff82c090-a849-4fbf-b08d-779da9a436f9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.472671] env[62510]: DEBUG oslo_vmware.api [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Waiting for the task: (returnval){ [ 1860.472671] env[62510]: value = "task-1769407" [ 1860.472671] env[62510]: _type = "Task" [ 1860.472671] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1860.481949] env[62510]: DEBUG oslo_vmware.api [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769407, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.482796] env[62510]: DEBUG nova.network.neutron [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Successfully updated port: f1d12594-5d5a-4965-a017-3b055a432283 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1860.562016] env[62510]: DEBUG oslo_concurrency.lockutils [None req-892a0028-30de-442b-a4f9-82c0483972c4 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.889s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1860.565262] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9fa74d3f-6705-44f3-b1fa-06ab045c682c tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.150s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1860.565677] env[62510]: DEBUG nova.objects.instance [None req-9fa74d3f-6705-44f3-b1fa-06ab045c682c tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Lazy-loading 'resources' on Instance uuid 2e24b76d-a770-4f1e-a8f1-a54417f1be81 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1860.597131] env[62510]: INFO nova.scheduler.client.report [None req-892a0028-30de-442b-a4f9-82c0483972c4 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Deleted allocations for instance e7e053be-cb88-4ae0-b157-3006211f77d9 [ 1860.717998] env[62510]: DEBUG oslo_concurrency.lockutils [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Acquiring lock "5cae60b1-c0b1-4ff4-baf9-b8d1885614e8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1860.718269] env[62510]: DEBUG oslo_concurrency.lockutils [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Lock "5cae60b1-c0b1-4ff4-baf9-b8d1885614e8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1860.974074] env[62510]: DEBUG nova.compute.manager [req-b63666dd-89f4-4570-b047-fe3070862a47 req-049b72bd-eb51-4d09-a6d8-ca8c4b85ecbe service nova] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Received event network-vif-plugged-f1d12594-5d5a-4965-a017-3b055a432283 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1860.974417] env[62510]: DEBUG oslo_concurrency.lockutils [req-b63666dd-89f4-4570-b047-fe3070862a47 req-049b72bd-eb51-4d09-a6d8-ca8c4b85ecbe service nova] Acquiring lock "0f6e9363-47ac-481e-bc1c-b8f4f9748d9c-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1860.974738] env[62510]: DEBUG oslo_concurrency.lockutils [req-b63666dd-89f4-4570-b047-fe3070862a47 req-049b72bd-eb51-4d09-a6d8-ca8c4b85ecbe service nova] Lock "0f6e9363-47ac-481e-bc1c-b8f4f9748d9c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1860.975043] env[62510]: DEBUG oslo_concurrency.lockutils [req-b63666dd-89f4-4570-b047-fe3070862a47 req-049b72bd-eb51-4d09-a6d8-ca8c4b85ecbe service nova] Lock "0f6e9363-47ac-481e-bc1c-b8f4f9748d9c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1860.975316] env[62510]: DEBUG nova.compute.manager [req-b63666dd-89f4-4570-b047-fe3070862a47 req-049b72bd-eb51-4d09-a6d8-ca8c4b85ecbe service nova] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] No waiting events found dispatching network-vif-plugged-f1d12594-5d5a-4965-a017-3b055a432283 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1860.975593] env[62510]: WARNING nova.compute.manager [req-b63666dd-89f4-4570-b047-fe3070862a47 req-049b72bd-eb51-4d09-a6d8-ca8c4b85ecbe service nova] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Received unexpected event network-vif-plugged-f1d12594-5d5a-4965-a017-3b055a432283 for instance with vm_state building and task_state spawning. [ 1860.975868] env[62510]: DEBUG nova.compute.manager [req-b63666dd-89f4-4570-b047-fe3070862a47 req-049b72bd-eb51-4d09-a6d8-ca8c4b85ecbe service nova] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Received event network-changed-f1d12594-5d5a-4965-a017-3b055a432283 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1860.976154] env[62510]: DEBUG nova.compute.manager [req-b63666dd-89f4-4570-b047-fe3070862a47 req-049b72bd-eb51-4d09-a6d8-ca8c4b85ecbe service nova] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Refreshing instance network info cache due to event network-changed-f1d12594-5d5a-4965-a017-3b055a432283. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1860.976437] env[62510]: DEBUG oslo_concurrency.lockutils [req-b63666dd-89f4-4570-b047-fe3070862a47 req-049b72bd-eb51-4d09-a6d8-ca8c4b85ecbe service nova] Acquiring lock "refresh_cache-0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1860.976670] env[62510]: DEBUG oslo_concurrency.lockutils [req-b63666dd-89f4-4570-b047-fe3070862a47 req-049b72bd-eb51-4d09-a6d8-ca8c4b85ecbe service nova] Acquired lock "refresh_cache-0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1860.976923] env[62510]: DEBUG nova.network.neutron [req-b63666dd-89f4-4570-b047-fe3070862a47 req-049b72bd-eb51-4d09-a6d8-ca8c4b85ecbe service nova] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Refreshing network info cache for port f1d12594-5d5a-4965-a017-3b055a432283 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1860.988475] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "refresh_cache-0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1860.996785] env[62510]: DEBUG oslo_vmware.api [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769407, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.065477] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquiring lock "cf4160a8-1160-45fc-b9e5-e9526b6c1506" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1861.066277] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lock "cf4160a8-1160-45fc-b9e5-e9526b6c1506" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1861.066500] env[62510]: INFO nova.compute.manager [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Shelving [ 1861.106324] env[62510]: DEBUG oslo_concurrency.lockutils [None req-892a0028-30de-442b-a4f9-82c0483972c4 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "e7e053be-cb88-4ae0-b157-3006211f77d9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.357s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1861.222113] env[62510]: DEBUG nova.compute.manager [None 
req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1861.383030] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3003d33c-35db-483f-a73d-d67235d58073 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.395186] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee8447f7-a017-4692-8be4-7497d8ab7b70 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.428722] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-849451b8-71e7-4b28-975a-e684dd6aac15 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.437296] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f2b4264-6c26-4e9b-9216-4026ac910800 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.456261] env[62510]: DEBUG nova.compute.provider_tree [None req-9fa74d3f-6705-44f3-b1fa-06ab045c682c tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1861.489050] env[62510]: DEBUG oslo_vmware.api [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769407, 'name': PowerOnVM_Task, 'duration_secs': 0.824903} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1861.489329] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1861.489526] env[62510]: INFO nova.compute.manager [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Took 10.43 seconds to spawn the instance on the hypervisor. 
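The oslo_concurrency.lockutils lines threaded through this section ('Acquiring lock "compute_resources" ...', '... acquired ... waited 3.150s', '... "released" ... held 1.889s') come from a wrapper that times how long a caller waited for a named in-process lock and how long it held it. A rough sketch of that pattern follows, assuming oslo.concurrency is installed; the measured_lock helper and its output format are illustrative, not the real lockutils internals.

    import time
    from contextlib import contextmanager
    from oslo_concurrency import lockutils  # named, in-process locks

    @contextmanager
    def measured_lock(name):
        """Acquire a named lock and report waited/held times, log-style."""
        t0 = time.monotonic()
        with lockutils.lock(name):           # same primitive the log lines refer to
            waited = time.monotonic() - t0
            print('Lock "%s" acquired :: waited %.3fs' % (name, waited))
            t1 = time.monotonic()
            try:
                yield
            finally:
                held = time.monotonic() - t1
                print('Lock "%s" "released" :: held %.3fs' % (name, held))

    if __name__ == '__main__':
        with measured_lock('compute_resources'):
            time.sleep(0.1)   # stand-in for ResourceTracker.instance_claim work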
[ 1861.489700] env[62510]: DEBUG nova.compute.manager [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1861.490541] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2a7d5a8-cf33-4961-922f-3ce9e00ee8fc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.528069] env[62510]: DEBUG nova.network.neutron [req-b63666dd-89f4-4570-b047-fe3070862a47 req-049b72bd-eb51-4d09-a6d8-ca8c4b85ecbe service nova] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1861.633722] env[62510]: DEBUG nova.network.neutron [req-b63666dd-89f4-4570-b047-fe3070862a47 req-049b72bd-eb51-4d09-a6d8-ca8c4b85ecbe service nova] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1861.742496] env[62510]: DEBUG oslo_concurrency.lockutils [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1861.961473] env[62510]: DEBUG nova.scheduler.client.report [None req-9fa74d3f-6705-44f3-b1fa-06ab045c682c tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1862.014603] env[62510]: INFO nova.compute.manager [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Took 25.76 seconds to build instance. 
[ 1862.065180] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "c2be17de-175a-401f-8c53-f785aeecfff4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1862.066589] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "c2be17de-175a-401f-8c53-f785aeecfff4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1862.078494] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1862.079020] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f4484090-dc3c-4456-9a6a-5ad8c6c79063 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.090373] env[62510]: DEBUG oslo_vmware.api [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1862.090373] env[62510]: value = "task-1769408" [ 1862.090373] env[62510]: _type = "Task" [ 1862.090373] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1862.102749] env[62510]: DEBUG oslo_vmware.api [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769408, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.137344] env[62510]: DEBUG oslo_concurrency.lockutils [req-b63666dd-89f4-4570-b047-fe3070862a47 req-049b72bd-eb51-4d09-a6d8-ca8c4b85ecbe service nova] Releasing lock "refresh_cache-0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1862.138572] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquired lock "refresh_cache-0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1862.138572] env[62510]: DEBUG nova.network.neutron [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1862.471946] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9fa74d3f-6705-44f3-b1fa-06ab045c682c tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.907s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1862.474697] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.507s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1862.476496] env[62510]: INFO nova.compute.claims [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1862.491609] env[62510]: INFO nova.scheduler.client.report [None req-9fa74d3f-6705-44f3-b1fa-06ab045c682c tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Deleted allocations for instance 2e24b76d-a770-4f1e-a8f1-a54417f1be81 [ 1862.518577] env[62510]: DEBUG oslo_concurrency.lockutils [None req-dbda1abd-d06f-4c87-8883-3d3216fd76a8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lock "92cb4e54-a00e-4974-b134-22d302932e32" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.277s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1862.569778] env[62510]: DEBUG nova.compute.manager [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1862.605902] env[62510]: DEBUG oslo_vmware.api [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769408, 'name': PowerOffVM_Task, 'duration_secs': 0.214502} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1862.606247] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1862.607156] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cf3734b-5ffc-46ce-a3af-0aa03278147c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.633739] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f2a41d8-f5bb-43f6-9d92-9c7ed4cd0c76 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.683699] env[62510]: DEBUG nova.network.neutron [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1862.902745] env[62510]: DEBUG nova.network.neutron [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Updating instance_info_cache with network_info: [{"id": "f1d12594-5d5a-4965-a017-3b055a432283", "address": "fa:16:3e:e6:3b:d1", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1d12594-5d", "ovs_interfaceid": "f1d12594-5d5a-4965-a017-3b055a432283", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1862.923388] env[62510]: DEBUG nova.compute.manager [req-06a45968-967f-4251-ab17-b30ff7efc72d req-fdcd3fab-bad1-4e31-a5ad-bca85242787a service nova] [instance: 
92cb4e54-a00e-4974-b134-22d302932e32] Received event network-changed-68c246e2-7126-4f5b-bc52-3c63f14aacf5 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1862.923644] env[62510]: DEBUG nova.compute.manager [req-06a45968-967f-4251-ab17-b30ff7efc72d req-fdcd3fab-bad1-4e31-a5ad-bca85242787a service nova] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Refreshing instance network info cache due to event network-changed-68c246e2-7126-4f5b-bc52-3c63f14aacf5. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1862.923872] env[62510]: DEBUG oslo_concurrency.lockutils [req-06a45968-967f-4251-ab17-b30ff7efc72d req-fdcd3fab-bad1-4e31-a5ad-bca85242787a service nova] Acquiring lock "refresh_cache-92cb4e54-a00e-4974-b134-22d302932e32" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1862.924250] env[62510]: DEBUG oslo_concurrency.lockutils [req-06a45968-967f-4251-ab17-b30ff7efc72d req-fdcd3fab-bad1-4e31-a5ad-bca85242787a service nova] Acquired lock "refresh_cache-92cb4e54-a00e-4974-b134-22d302932e32" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1862.924473] env[62510]: DEBUG nova.network.neutron [req-06a45968-967f-4251-ab17-b30ff7efc72d req-fdcd3fab-bad1-4e31-a5ad-bca85242787a service nova] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Refreshing network info cache for port 68c246e2-7126-4f5b-bc52-3c63f14aacf5 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1862.998901] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9fa74d3f-6705-44f3-b1fa-06ab045c682c tempest-ImagesOneServerNegativeTestJSON-1835184829 tempest-ImagesOneServerNegativeTestJSON-1835184829-project-member] Lock "2e24b76d-a770-4f1e-a8f1-a54417f1be81" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.296s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1863.096333] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1863.148147] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Creating Snapshot of the VM instance {{(pid=62510) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1863.148902] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-00aa55c7-9544-439c-bc28-09b81137be8c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.158884] env[62510]: DEBUG oslo_vmware.api [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1863.158884] env[62510]: value = "task-1769409" [ 1863.158884] env[62510]: _type = "Task" [ 1863.158884] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1863.169076] env[62510]: DEBUG oslo_vmware.api [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769409, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.408758] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Releasing lock "refresh_cache-0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1863.409241] env[62510]: DEBUG nova.compute.manager [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Instance network_info: |[{"id": "f1d12594-5d5a-4965-a017-3b055a432283", "address": "fa:16:3e:e6:3b:d1", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1d12594-5d", "ovs_interfaceid": "f1d12594-5d5a-4965-a017-3b055a432283", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1863.409762] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e6:3b:d1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2e0cfc48-d93b-4477-8082-69a2f7aa7701', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f1d12594-5d5a-4965-a017-3b055a432283', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1863.425020] env[62510]: DEBUG oslo.service.loopingcall [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1863.425020] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1863.425292] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2deb7864-20f3-4da7-b37d-19cb5cb845c4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.453543] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1863.453543] env[62510]: value = "task-1769410" [ 1863.453543] env[62510]: _type = "Task" [ 1863.453543] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1863.462943] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769410, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.672925] env[62510]: DEBUG oslo_vmware.api [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769409, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.825430] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1362646e-729d-4673-9550-9c1f9abd629c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.833346] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bd6b7d3-ea52-493e-aac5-6bedcbc50bdb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.838831] env[62510]: DEBUG nova.network.neutron [req-06a45968-967f-4251-ab17-b30ff7efc72d req-fdcd3fab-bad1-4e31-a5ad-bca85242787a service nova] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Updated VIF entry in instance network info cache for port 68c246e2-7126-4f5b-bc52-3c63f14aacf5. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1863.839217] env[62510]: DEBUG nova.network.neutron [req-06a45968-967f-4251-ab17-b30ff7efc72d req-fdcd3fab-bad1-4e31-a5ad-bca85242787a service nova] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Updating instance_info_cache with network_info: [{"id": "68c246e2-7126-4f5b-bc52-3c63f14aacf5", "address": "fa:16:3e:05:6e:0f", "network": {"id": "2193bc16-0e54-4910-9194-2724652b0e5d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1870939634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "59300e0f20144d9f88b78f7c971e86c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbdab640-5fea-4254-8bd3-f855b7eaca0d", "external-id": "nsx-vlan-transportzone-615", "segmentation_id": 615, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68c246e2-71", "ovs_interfaceid": "68c246e2-7126-4f5b-bc52-3c63f14aacf5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1863.867860] env[62510]: DEBUG oslo_concurrency.lockutils [req-06a45968-967f-4251-ab17-b30ff7efc72d req-fdcd3fab-bad1-4e31-a5ad-bca85242787a service nova] Releasing lock "refresh_cache-92cb4e54-a00e-4974-b134-22d302932e32" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1863.868702] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a55eab3c-408e-41df-ad9c-f361c3630ed2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.877166] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-947f0855-943f-4978-9337-ea016e787740 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.891565] env[62510]: DEBUG nova.compute.provider_tree [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1863.965443] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769410, 'name': CreateVM_Task, 'duration_secs': 0.367038} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1863.965653] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1863.966489] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1863.966666] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1863.966991] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1863.967287] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0f1bd2e-db79-4b0b-8082-9386e5c8964b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.972962] env[62510]: DEBUG oslo_vmware.api [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1863.972962] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52f4d821-8241-4250-db91-fa3f574c95bf" [ 1863.972962] env[62510]: _type = "Task" [ 1863.972962] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1863.981723] env[62510]: DEBUG oslo_vmware.api [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52f4d821-8241-4250-db91-fa3f574c95bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.171620] env[62510]: DEBUG oslo_vmware.api [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769409, 'name': CreateSnapshot_Task, 'duration_secs': 0.630833} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1864.171826] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Created Snapshot of the VM instance {{(pid=62510) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1864.172774] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1240375-7618-41a9-9cd1-e9aae998ec78 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.395613] env[62510]: DEBUG nova.scheduler.client.report [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1864.484680] env[62510]: DEBUG oslo_vmware.api [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52f4d821-8241-4250-db91-fa3f574c95bf, 'name': SearchDatastore_Task, 'duration_secs': 0.016085} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1864.485070] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1864.485257] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1864.485543] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1864.485746] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1864.485822] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1864.486105] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-20bb6303-c0df-430b-ab74-f918aa0d50c1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.501188] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1864.501298] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1864.502029] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ab51299-1fe7-416a-a3f1-b4d49bcb9221 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.507890] env[62510]: DEBUG oslo_vmware.api [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1864.507890] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]526d19d0-3d5f-ede5-5fdf-188181f9fdcf" [ 1864.507890] env[62510]: _type = "Task" [ 1864.507890] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1864.516127] env[62510]: DEBUG oslo_vmware.api [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]526d19d0-3d5f-ede5-5fdf-188181f9fdcf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.692459] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Creating linked-clone VM from snapshot {{(pid=62510) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1864.693390] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-8c464a32-ea59-4b59-89a9-3c1970f761ab {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.707144] env[62510]: DEBUG oslo_vmware.api [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1864.707144] env[62510]: value = "task-1769411" [ 1864.707144] env[62510]: _type = "Task" [ 1864.707144] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1864.716066] env[62510]: DEBUG oslo_vmware.api [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769411, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.901308] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.427s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1864.901833] env[62510]: DEBUG nova.compute.manager [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1864.905437] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 6.907s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1865.020613] env[62510]: DEBUG oslo_vmware.api [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]526d19d0-3d5f-ede5-5fdf-188181f9fdcf, 'name': SearchDatastore_Task, 'duration_secs': 0.033526} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1865.021354] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-deea5b9c-331b-429b-9677-119e9f39af3f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.027323] env[62510]: DEBUG oslo_vmware.api [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1865.027323] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]527837db-31a1-ab5e-e3fa-7135af3e2f2f" [ 1865.027323] env[62510]: _type = "Task" [ 1865.027323] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1865.036830] env[62510]: DEBUG oslo_vmware.api [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]527837db-31a1-ab5e-e3fa-7135af3e2f2f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.217135] env[62510]: DEBUG oslo_vmware.api [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769411, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.407217] env[62510]: DEBUG nova.compute.utils [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1865.409026] env[62510]: DEBUG nova.compute.manager [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1865.409026] env[62510]: DEBUG nova.network.neutron [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1865.412776] env[62510]: INFO nova.compute.claims [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1865.455533] env[62510]: DEBUG nova.policy [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '91e21fc3222846949a70dc17fba01e00', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '99bae3a9008a46349842b33ce6e41b25', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1865.539603] env[62510]: DEBUG oslo_vmware.api [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]527837db-31a1-ab5e-e3fa-7135af3e2f2f, 'name': SearchDatastore_Task, 'duration_secs': 0.03844} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1865.539928] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1865.540218] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c/0f6e9363-47ac-481e-bc1c-b8f4f9748d9c.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1865.540522] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e52f9660-e959-4ec9-adab-ff5015e1c76b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.550373] env[62510]: DEBUG oslo_vmware.api [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1865.550373] env[62510]: value = "task-1769412" [ 1865.550373] env[62510]: _type = "Task" [ 1865.550373] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1865.560391] env[62510]: DEBUG oslo_vmware.api [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769412, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.719357] env[62510]: DEBUG oslo_vmware.api [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769411, 'name': CloneVM_Task} progress is 94%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.734539] env[62510]: DEBUG nova.network.neutron [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Successfully created port: da533fd5-935b-4b32-8845-bea1060e4ca1 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1865.918079] env[62510]: DEBUG nova.compute.manager [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Start building block device mappings for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1865.922275] env[62510]: INFO nova.compute.resource_tracker [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Updating resource usage from migration 8a33b4da-7f27-4c88-9c51-4bbb9a0040a7 [ 1866.061246] env[62510]: DEBUG oslo_vmware.api [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769412, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.203063] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6bfbadc-3acc-4795-a8d7-55da7520bacb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.217111] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-548b7f49-65fd-4653-9c88-d44cad86f6ec {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.230087] env[62510]: DEBUG oslo_vmware.api [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769411, 'name': CloneVM_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.256239] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-063f8025-a715-4a80-8c5f-1869ee1b6d2f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.264581] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf1fb995-29e9-4047-b283-f3e0747d3bbc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.278948] env[62510]: DEBUG nova.compute.provider_tree [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1866.560977] env[62510]: DEBUG oslo_vmware.api [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769412, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.621533} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1866.561261] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c/0f6e9363-47ac-481e-bc1c-b8f4f9748d9c.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1866.561488] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1866.561721] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-37821e6f-1508-49c7-aa24-e70f5a060433 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.569673] env[62510]: DEBUG oslo_vmware.api [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1866.569673] env[62510]: value = "task-1769413" [ 1866.569673] env[62510]: _type = "Task" [ 1866.569673] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.581920] env[62510]: DEBUG oslo_vmware.api [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769413, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.721346] env[62510]: DEBUG oslo_vmware.api [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769411, 'name': CloneVM_Task, 'duration_secs': 1.651753} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1866.721691] env[62510]: INFO nova.virt.vmwareapi.vmops [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Created linked-clone VM from snapshot [ 1866.722503] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a01237a-c08c-4bf7-9b36-a65e169addb1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.730067] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Uploading image da7c8e66-0047-4492-9c76-db7e729079e0 {{(pid=62510) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1866.755880] env[62510]: DEBUG oslo_vmware.rw_handles [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1866.755880] env[62510]: value = "vm-367464" [ 1866.755880] env[62510]: _type = "VirtualMachine" [ 1866.755880] env[62510]: }. {{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1866.756176] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-0b07bd04-5293-4c90-aaef-2898f6d470ba {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.763158] env[62510]: DEBUG oslo_vmware.rw_handles [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lease: (returnval){ [ 1866.763158] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52faac01-f6b2-f446-d989-23ace8d5a612" [ 1866.763158] env[62510]: _type = "HttpNfcLease" [ 1866.763158] env[62510]: } obtained for exporting VM: (result){ [ 1866.763158] env[62510]: value = "vm-367464" [ 1866.763158] env[62510]: _type = "VirtualMachine" [ 1866.763158] env[62510]: }. {{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1866.763420] env[62510]: DEBUG oslo_vmware.api [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the lease: (returnval){ [ 1866.763420] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52faac01-f6b2-f446-d989-23ace8d5a612" [ 1866.763420] env[62510]: _type = "HttpNfcLease" [ 1866.763420] env[62510]: } to be ready. {{(pid=62510) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1866.770064] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1866.770064] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52faac01-f6b2-f446-d989-23ace8d5a612" [ 1866.770064] env[62510]: _type = "HttpNfcLease" [ 1866.770064] env[62510]: } is initializing. 
{{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1866.782014] env[62510]: DEBUG nova.scheduler.client.report [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1866.931861] env[62510]: DEBUG nova.compute.manager [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1866.955036] env[62510]: DEBUG nova.virt.hardware [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1866.955317] env[62510]: DEBUG nova.virt.hardware [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1866.955492] env[62510]: DEBUG nova.virt.hardware [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1866.955689] env[62510]: DEBUG nova.virt.hardware [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1866.955837] env[62510]: DEBUG nova.virt.hardware [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1866.955983] 
env[62510]: DEBUG nova.virt.hardware [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1866.956206] env[62510]: DEBUG nova.virt.hardware [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1866.956366] env[62510]: DEBUG nova.virt.hardware [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1866.956530] env[62510]: DEBUG nova.virt.hardware [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1866.956688] env[62510]: DEBUG nova.virt.hardware [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1866.956854] env[62510]: DEBUG nova.virt.hardware [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1866.957749] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61ab0f48-e3df-4b5b-83f3-5175ba88ab26 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.965777] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71e1c1c0-d416-4615-b017-7755b6257524 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.082188] env[62510]: DEBUG oslo_vmware.api [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769413, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066497} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1867.082473] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1867.083305] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e461c55-6b0f-4c2c-bb28-c34f3308b1f2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.105720] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c/0f6e9363-47ac-481e-bc1c-b8f4f9748d9c.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1867.106030] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c9841dc5-7dc6-4c62-8e23-86362609bfe3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.129239] env[62510]: DEBUG oslo_vmware.api [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1867.129239] env[62510]: value = "task-1769415" [ 1867.129239] env[62510]: _type = "Task" [ 1867.129239] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1867.137842] env[62510]: DEBUG oslo_vmware.api [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769415, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.209235] env[62510]: DEBUG nova.compute.manager [req-8628438a-6127-4e67-9cd6-26d76af75adb req-b8effd23-658e-4ef0-a89d-84eabf605181 service nova] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Received event network-vif-plugged-da533fd5-935b-4b32-8845-bea1060e4ca1 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1867.209700] env[62510]: DEBUG oslo_concurrency.lockutils [req-8628438a-6127-4e67-9cd6-26d76af75adb req-b8effd23-658e-4ef0-a89d-84eabf605181 service nova] Acquiring lock "5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1867.209700] env[62510]: DEBUG oslo_concurrency.lockutils [req-8628438a-6127-4e67-9cd6-26d76af75adb req-b8effd23-658e-4ef0-a89d-84eabf605181 service nova] Lock "5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1867.209858] env[62510]: DEBUG oslo_concurrency.lockutils [req-8628438a-6127-4e67-9cd6-26d76af75adb req-b8effd23-658e-4ef0-a89d-84eabf605181 service nova] Lock "5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1867.210036] env[62510]: DEBUG nova.compute.manager [req-8628438a-6127-4e67-9cd6-26d76af75adb req-b8effd23-658e-4ef0-a89d-84eabf605181 service nova] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] No waiting events found dispatching network-vif-plugged-da533fd5-935b-4b32-8845-bea1060e4ca1 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1867.210211] env[62510]: WARNING nova.compute.manager [req-8628438a-6127-4e67-9cd6-26d76af75adb req-b8effd23-658e-4ef0-a89d-84eabf605181 service nova] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Received unexpected event network-vif-plugged-da533fd5-935b-4b32-8845-bea1060e4ca1 for instance with vm_state building and task_state spawning. [ 1867.272495] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1867.272495] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52faac01-f6b2-f446-d989-23ace8d5a612" [ 1867.272495] env[62510]: _type = "HttpNfcLease" [ 1867.272495] env[62510]: } is ready. {{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1867.272810] env[62510]: DEBUG oslo_vmware.rw_handles [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1867.272810] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52faac01-f6b2-f446-d989-23ace8d5a612" [ 1867.272810] env[62510]: _type = "HttpNfcLease" [ 1867.272810] env[62510]: }. 
{{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1867.273596] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-972a73f0-dd8f-4e1d-9a66-47106710fee9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.282030] env[62510]: DEBUG oslo_vmware.rw_handles [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52009522-d61c-fcb3-1687-ecd502d1487a/disk-0.vmdk from lease info. {{(pid=62510) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1867.282030] env[62510]: DEBUG oslo_vmware.rw_handles [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52009522-d61c-fcb3-1687-ecd502d1487a/disk-0.vmdk for reading. {{(pid=62510) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1867.339496] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.434s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1867.339704] env[62510]: INFO nova.compute.manager [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Migrating [ 1867.350685] env[62510]: DEBUG nova.network.neutron [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Successfully updated port: da533fd5-935b-4b32-8845-bea1060e4ca1 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1867.352212] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.335s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1867.353171] env[62510]: INFO nova.compute.claims [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1867.392915] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d63cf2e9-9e08-4931-8fc5-f1784492fc4b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.642546] env[62510]: DEBUG oslo_vmware.api [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 
tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769415, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.861457] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquiring lock "refresh_cache-5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1867.861716] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquired lock "refresh_cache-5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1867.861885] env[62510]: DEBUG nova.network.neutron [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1867.865928] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "refresh_cache-841460b0-d917-44ea-88c6-0e5a3022f658" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1867.866432] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquired lock "refresh_cache-841460b0-d917-44ea-88c6-0e5a3022f658" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1867.866513] env[62510]: DEBUG nova.network.neutron [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1868.141291] env[62510]: DEBUG oslo_vmware.api [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769415, 'name': ReconfigVM_Task, 'duration_secs': 0.837773} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.141706] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Reconfigured VM instance instance-00000064 to attach disk [datastore1] 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c/0f6e9363-47ac-481e-bc1c-b8f4f9748d9c.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1868.142537] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3da6da9c-ffcc-496e-ad73-52f0b871c9c8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.150652] env[62510]: DEBUG oslo_vmware.api [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1868.150652] env[62510]: value = "task-1769416" [ 1868.150652] env[62510]: _type = "Task" [ 1868.150652] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.161564] env[62510]: DEBUG oslo_vmware.api [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769416, 'name': Rename_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.402377] env[62510]: DEBUG nova.network.neutron [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Instance cache missing network info. 
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1868.565821] env[62510]: DEBUG nova.network.neutron [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Updating instance_info_cache with network_info: [{"id": "da533fd5-935b-4b32-8845-bea1060e4ca1", "address": "fa:16:3e:c4:50:7c", "network": {"id": "dd80546c-dbc1-461a-8b4a-342b8a63957b", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-174038375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "99bae3a9008a46349842b33ce6e41b25", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda533fd5-93", "ovs_interfaceid": "da533fd5-935b-4b32-8845-bea1060e4ca1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1868.662803] env[62510]: DEBUG oslo_vmware.api [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769416, 'name': Rename_Task, 'duration_secs': 0.173144} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.664074] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1868.664940] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1afd7576-2bbd-4a4f-9879-84cd9b2e8bea {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.667399] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-976ce871-8ce1-4c99-b556-3dea14292bd1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.675276] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-472e38f7-da4f-4fa3-88f7-309fb23eb0d5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.680896] env[62510]: DEBUG oslo_vmware.api [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1868.680896] env[62510]: value = "task-1769417" [ 1868.680896] env[62510]: _type = "Task" [ 1868.680896] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.685447] env[62510]: DEBUG nova.network.neutron [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Updating instance_info_cache with network_info: [{"id": "5992dff8-0336-4d13-bbe8-2614b9dc96d5", "address": "fa:16:3e:47:48:b1", "network": {"id": "4c55d05c-607e-4972-898f-4aacefeddfdb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1391357384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bae4f0adee8c4c28add1849316448538", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dced2f3d-7fd3-4a42-836d-9f02dab4c949", "external-id": "nsx-vlan-transportzone-117", "segmentation_id": 117, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5992dff8-03", "ovs_interfaceid": "5992dff8-0336-4d13-bbe8-2614b9dc96d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1868.717513] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-b660c487-6231-4100-9abe-054abad014fc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.723983] env[62510]: DEBUG oslo_vmware.api [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769417, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.730817] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-720f5e0c-8fba-43b9-bd4a-9f55b1f9831e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.750053] env[62510]: DEBUG nova.compute.provider_tree [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1869.069068] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Releasing lock "refresh_cache-5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1869.069547] env[62510]: DEBUG nova.compute.manager [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Instance network_info: |[{"id": "da533fd5-935b-4b32-8845-bea1060e4ca1", "address": "fa:16:3e:c4:50:7c", "network": {"id": "dd80546c-dbc1-461a-8b4a-342b8a63957b", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-174038375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "99bae3a9008a46349842b33ce6e41b25", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda533fd5-93", "ovs_interfaceid": "da533fd5-935b-4b32-8845-bea1060e4ca1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1869.070086] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c4:50:7c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2e614f8e-6b11-4b6b-a421-904bca6acd91', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'da533fd5-935b-4b32-8845-bea1060e4ca1', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1869.078608] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Creating folder: Project (99bae3a9008a46349842b33ce6e41b25). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1869.078994] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c62e4895-24f0-4bb1-bbf8-d81ea9a99326 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.091698] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Created folder: Project (99bae3a9008a46349842b33ce6e41b25) in parent group-v367197. [ 1869.092023] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Creating folder: Instances. Parent ref: group-v367465. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1869.092347] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-96fe97bf-dbaa-4bb6-a9d2-c55bf5c66dad {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.105066] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Created folder: Instances in parent group-v367465. [ 1869.105357] env[62510]: DEBUG oslo.service.loopingcall [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1869.105635] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1869.105874] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5a2385cc-5853-46c6-9250-719d285038fa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.129526] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1869.129526] env[62510]: value = "task-1769420" [ 1869.129526] env[62510]: _type = "Task" [ 1869.129526] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.138337] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769420, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.188148] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Releasing lock "refresh_cache-841460b0-d917-44ea-88c6-0e5a3022f658" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1869.196936] env[62510]: DEBUG oslo_vmware.api [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769417, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.237905] env[62510]: DEBUG nova.compute.manager [req-d5281869-2f4d-47da-8e6d-bdb6d5e581fe req-fc2d812a-1df8-4719-9b8c-5fb8b46c63e3 service nova] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Received event network-changed-da533fd5-935b-4b32-8845-bea1060e4ca1 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1869.238122] env[62510]: DEBUG nova.compute.manager [req-d5281869-2f4d-47da-8e6d-bdb6d5e581fe req-fc2d812a-1df8-4719-9b8c-5fb8b46c63e3 service nova] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Refreshing instance network info cache due to event network-changed-da533fd5-935b-4b32-8845-bea1060e4ca1. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1869.238345] env[62510]: DEBUG oslo_concurrency.lockutils [req-d5281869-2f4d-47da-8e6d-bdb6d5e581fe req-fc2d812a-1df8-4719-9b8c-5fb8b46c63e3 service nova] Acquiring lock "refresh_cache-5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1869.238493] env[62510]: DEBUG oslo_concurrency.lockutils [req-d5281869-2f4d-47da-8e6d-bdb6d5e581fe req-fc2d812a-1df8-4719-9b8c-5fb8b46c63e3 service nova] Acquired lock "refresh_cache-5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1869.238655] env[62510]: DEBUG nova.network.neutron [req-d5281869-2f4d-47da-8e6d-bdb6d5e581fe req-fc2d812a-1df8-4719-9b8c-5fb8b46c63e3 service nova] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Refreshing network info cache for port da533fd5-935b-4b32-8845-bea1060e4ca1 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1869.253677] env[62510]: DEBUG nova.scheduler.client.report [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1869.640410] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769420, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.701112] env[62510]: DEBUG oslo_vmware.api [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769417, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.761693] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.410s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1869.762265] env[62510]: DEBUG nova.compute.manager [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1869.765111] env[62510]: DEBUG oslo_concurrency.lockutils [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.455s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1869.766525] env[62510]: INFO nova.compute.claims [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1870.022486] env[62510]: DEBUG nova.network.neutron [req-d5281869-2f4d-47da-8e6d-bdb6d5e581fe req-fc2d812a-1df8-4719-9b8c-5fb8b46c63e3 service nova] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Updated VIF entry in instance network info cache for port da533fd5-935b-4b32-8845-bea1060e4ca1. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1870.022931] env[62510]: DEBUG nova.network.neutron [req-d5281869-2f4d-47da-8e6d-bdb6d5e581fe req-fc2d812a-1df8-4719-9b8c-5fb8b46c63e3 service nova] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Updating instance_info_cache with network_info: [{"id": "da533fd5-935b-4b32-8845-bea1060e4ca1", "address": "fa:16:3e:c4:50:7c", "network": {"id": "dd80546c-dbc1-461a-8b4a-342b8a63957b", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-174038375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "99bae3a9008a46349842b33ce6e41b25", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapda533fd5-93", "ovs_interfaceid": "da533fd5-935b-4b32-8845-bea1060e4ca1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1870.141423] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769420, 'name': CreateVM_Task, 'duration_secs': 0.566537} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1870.141678] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1870.142388] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1870.142696] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1870.143074] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1870.143159] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49ede7b0-b922-4d3a-b641-621356cd8e16 {{(pid=62510) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.148047] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for the task: (returnval){ [ 1870.148047] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52bb2c68-9559-6990-7b02-d15ab62478e0" [ 1870.148047] env[62510]: _type = "Task" [ 1870.148047] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1870.156369] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52bb2c68-9559-6990-7b02-d15ab62478e0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.193319] env[62510]: DEBUG oslo_vmware.api [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769417, 'name': PowerOnVM_Task, 'duration_secs': 1.22352} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1870.193612] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1870.193820] env[62510]: INFO nova.compute.manager [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Took 10.51 seconds to spawn the instance on the hypervisor. [ 1870.194009] env[62510]: DEBUG nova.compute.manager [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1870.195689] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbf5e529-cf6a-4e64-894b-a804923e1885 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.273962] env[62510]: DEBUG nova.compute.utils [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1870.276422] env[62510]: DEBUG nova.compute.manager [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1870.276699] env[62510]: DEBUG nova.network.neutron [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1870.313662] env[62510]: DEBUG nova.policy [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '91e21fc3222846949a70dc17fba01e00', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '99bae3a9008a46349842b33ce6e41b25', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1870.525647] env[62510]: DEBUG oslo_concurrency.lockutils [req-d5281869-2f4d-47da-8e6d-bdb6d5e581fe req-fc2d812a-1df8-4719-9b8c-5fb8b46c63e3 service nova] Releasing lock "refresh_cache-5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1870.592751] env[62510]: DEBUG nova.network.neutron [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Successfully created port: 359f36f0-f995-4822-b3df-83b9a561be76 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1870.659963] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52bb2c68-9559-6990-7b02-d15ab62478e0, 'name': SearchDatastore_Task, 'duration_secs': 0.012153} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1870.660290] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1870.660529] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1870.660762] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1870.660907] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1870.661097] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1870.661353] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d22b8f8d-1ec5-42bc-988c-951aa54f7106 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.674656] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1870.674837] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1870.675623] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1dd959f2-482f-4814-b10b-c4d4fec6e50b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.681457] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for the task: (returnval){ [ 1870.681457] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52225bc6-cb72-f38d-9e03-15eaacafac39" [ 1870.681457] env[62510]: _type = "Task" [ 1870.681457] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1870.689634] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52225bc6-cb72-f38d-9e03-15eaacafac39, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.717306] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0009a7ff-211a-42d4-8c86-1c2505f5b780 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.723944] env[62510]: INFO nova.compute.manager [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Took 21.38 seconds to build instance. [ 1870.741635] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Updating instance '841460b0-d917-44ea-88c6-0e5a3022f658' progress to 0 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1870.776588] env[62510]: DEBUG nova.compute.manager [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Start building block device mappings for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1871.046292] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e2d41de-dde6-4eab-a733-03d37c568573 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.054302] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcfdb8b0-d7a0-4827-93de-ee96c70629fb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.088662] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a334b0f-2532-4676-a38b-02492a7420ba {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.096790] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc16612a-00c5-488d-b9be-f025dc20f948 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.112028] env[62510]: DEBUG nova.compute.provider_tree [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1871.192620] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52225bc6-cb72-f38d-9e03-15eaacafac39, 'name': SearchDatastore_Task, 'duration_secs': 0.029745} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1871.193477] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bfb0c121-0c00-4817-ac01-c152d8af2c3d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.200281] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for the task: (returnval){ [ 1871.200281] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5286a49e-d8d2-8097-52f0-3caa7b041578" [ 1871.200281] env[62510]: _type = "Task" [ 1871.200281] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.208750] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5286a49e-d8d2-8097-52f0-3caa7b041578, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.227385] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2c1881d4-1f01-45fb-9312-e9a5f5a325f9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.896s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1871.248475] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1871.248819] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-10b0d440-89e5-49d1-9c3c-94e22fe00f6b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.257571] env[62510]: DEBUG oslo_vmware.api [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 1871.257571] env[62510]: value = "task-1769421" [ 1871.257571] env[62510]: _type = "Task" [ 1871.257571] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.266993] env[62510]: DEBUG oslo_vmware.api [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769421, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.615207] env[62510]: DEBUG nova.scheduler.client.report [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1871.711217] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5286a49e-d8d2-8097-52f0-3caa7b041578, 'name': SearchDatastore_Task, 'duration_secs': 0.009797} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1871.711546] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1871.711817] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c/5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1871.712108] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d4184211-4fbe-420e-95c7-9924db600d27 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.720432] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for the task: (returnval){ [ 1871.720432] env[62510]: value = "task-1769422" [ 1871.720432] env[62510]: _type = "Task" [ 1871.720432] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.729035] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769422, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.769570] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] VM already powered off {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1871.769813] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Updating instance '841460b0-d917-44ea-88c6-0e5a3022f658' progress to 17 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1871.788309] env[62510]: DEBUG nova.compute.manager [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1871.825523] env[62510]: DEBUG nova.virt.hardware [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1871.825855] env[62510]: DEBUG nova.virt.hardware [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1871.825966] env[62510]: DEBUG nova.virt.hardware [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1871.826288] env[62510]: DEBUG nova.virt.hardware [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1871.826631] env[62510]: DEBUG nova.virt.hardware [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1871.827305] env[62510]: DEBUG nova.virt.hardware [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1871.827583] env[62510]: DEBUG nova.virt.hardware [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1871.827766] env[62510]: DEBUG nova.virt.hardware [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1871.830041] env[62510]: DEBUG nova.virt.hardware [None 
req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1871.830041] env[62510]: DEBUG nova.virt.hardware [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1871.830041] env[62510]: DEBUG nova.virt.hardware [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1871.830041] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ebf949c-b202-440d-9107-4a511a2920f4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.839522] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cd742c8-5722-44ce-8d5a-dc210df33097 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.870171] env[62510]: DEBUG nova.compute.manager [req-43e31222-51c8-4b69-a27c-e5686d6ce5df req-e0af25d3-b622-49d3-9d04-fee7a0a682b6 service nova] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Received event network-changed-f1d12594-5d5a-4965-a017-3b055a432283 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1871.870411] env[62510]: DEBUG nova.compute.manager [req-43e31222-51c8-4b69-a27c-e5686d6ce5df req-e0af25d3-b622-49d3-9d04-fee7a0a682b6 service nova] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Refreshing instance network info cache due to event network-changed-f1d12594-5d5a-4965-a017-3b055a432283. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1871.870614] env[62510]: DEBUG oslo_concurrency.lockutils [req-43e31222-51c8-4b69-a27c-e5686d6ce5df req-e0af25d3-b622-49d3-9d04-fee7a0a682b6 service nova] Acquiring lock "refresh_cache-0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1871.870924] env[62510]: DEBUG oslo_concurrency.lockutils [req-43e31222-51c8-4b69-a27c-e5686d6ce5df req-e0af25d3-b622-49d3-9d04-fee7a0a682b6 service nova] Acquired lock "refresh_cache-0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1871.871128] env[62510]: DEBUG nova.network.neutron [req-43e31222-51c8-4b69-a27c-e5686d6ce5df req-e0af25d3-b622-49d3-9d04-fee7a0a682b6 service nova] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Refreshing network info cache for port f1d12594-5d5a-4965-a017-3b055a432283 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1872.121158] env[62510]: DEBUG oslo_concurrency.lockutils [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.356s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1872.122221] env[62510]: DEBUG nova.compute.manager [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1872.125702] env[62510]: DEBUG oslo_concurrency.lockutils [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.383s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1872.128317] env[62510]: INFO nova.compute.claims [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1872.233522] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769422, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.480773} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.233522] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c/5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1872.233522] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1872.233522] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-551c04c8-74dd-4609-a2ba-929e13b8ff87 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.242122] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for the task: (returnval){ [ 1872.242122] env[62510]: value = "task-1769423" [ 1872.242122] env[62510]: _type = "Task" [ 1872.242122] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.252719] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769423, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.278817] env[62510]: DEBUG nova.virt.hardware [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:41Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1872.279210] env[62510]: DEBUG nova.virt.hardware [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1872.279416] env[62510]: DEBUG nova.virt.hardware [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1872.279625] env[62510]: DEBUG nova.virt.hardware [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1872.279779] env[62510]: DEBUG nova.virt.hardware [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1872.279991] env[62510]: DEBUG nova.virt.hardware [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1872.280252] env[62510]: DEBUG nova.virt.hardware [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1872.280420] env[62510]: DEBUG nova.virt.hardware [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1872.280625] env[62510]: DEBUG nova.virt.hardware [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 
tempest-ServerActionsTestOtherB-1185268283-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1872.280811] env[62510]: DEBUG nova.virt.hardware [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1872.280984] env[62510]: DEBUG nova.virt.hardware [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1872.288822] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3ab17404-295f-40ee-bbf1-53d8a7bb8fdf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.305118] env[62510]: DEBUG oslo_vmware.api [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 1872.305118] env[62510]: value = "task-1769424" [ 1872.305118] env[62510]: _type = "Task" [ 1872.305118] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.316833] env[62510]: DEBUG oslo_vmware.api [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769424, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.476223] env[62510]: DEBUG nova.compute.manager [req-389def7d-9b37-465f-a26a-7c44c1737f3c req-db10a166-fcb9-40c3-8983-4b1315353ccb service nova] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Received event network-vif-plugged-359f36f0-f995-4822-b3df-83b9a561be76 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1872.476223] env[62510]: DEBUG oslo_concurrency.lockutils [req-389def7d-9b37-465f-a26a-7c44c1737f3c req-db10a166-fcb9-40c3-8983-4b1315353ccb service nova] Acquiring lock "82dceacf-1898-4d86-b1c6-552a24ab565f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1872.476223] env[62510]: DEBUG oslo_concurrency.lockutils [req-389def7d-9b37-465f-a26a-7c44c1737f3c req-db10a166-fcb9-40c3-8983-4b1315353ccb service nova] Lock "82dceacf-1898-4d86-b1c6-552a24ab565f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1872.476223] env[62510]: DEBUG oslo_concurrency.lockutils [req-389def7d-9b37-465f-a26a-7c44c1737f3c req-db10a166-fcb9-40c3-8983-4b1315353ccb service nova] Lock "82dceacf-1898-4d86-b1c6-552a24ab565f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1872.476223] env[62510]: DEBUG nova.compute.manager [req-389def7d-9b37-465f-a26a-7c44c1737f3c req-db10a166-fcb9-40c3-8983-4b1315353ccb service nova] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] No waiting events found dispatching network-vif-plugged-359f36f0-f995-4822-b3df-83b9a561be76 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1872.476223] env[62510]: WARNING nova.compute.manager [req-389def7d-9b37-465f-a26a-7c44c1737f3c req-db10a166-fcb9-40c3-8983-4b1315353ccb service nova] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Received unexpected event network-vif-plugged-359f36f0-f995-4822-b3df-83b9a561be76 for instance with vm_state building and task_state spawning. [ 1872.634092] env[62510]: DEBUG nova.compute.utils [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1872.637732] env[62510]: DEBUG nova.compute.manager [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1872.637924] env[62510]: DEBUG nova.network.neutron [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1872.643953] env[62510]: DEBUG nova.network.neutron [req-43e31222-51c8-4b69-a27c-e5686d6ce5df req-e0af25d3-b622-49d3-9d04-fee7a0a682b6 service nova] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Updated VIF entry in instance network info cache for port f1d12594-5d5a-4965-a017-3b055a432283. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1872.644415] env[62510]: DEBUG nova.network.neutron [req-43e31222-51c8-4b69-a27c-e5686d6ce5df req-e0af25d3-b622-49d3-9d04-fee7a0a682b6 service nova] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Updating instance_info_cache with network_info: [{"id": "f1d12594-5d5a-4965-a017-3b055a432283", "address": "fa:16:3e:e6:3b:d1", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.135", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1d12594-5d", "ovs_interfaceid": "f1d12594-5d5a-4965-a017-3b055a432283", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1872.679961] env[62510]: DEBUG nova.policy [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dbc6eced57ea45fdafc3635a58fb3611', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f878b652f01c48139bfc6996e5e32f5b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1872.755821] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769423, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070873} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.756174] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1872.757392] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62590883-ac72-470b-979a-2db933184675 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.782060] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c/5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1872.782407] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3dd78e9c-80b1-466d-aeb3-c7f6b43c87a9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.804289] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for the task: (returnval){ [ 1872.804289] env[62510]: value = "task-1769425" [ 1872.804289] env[62510]: _type = "Task" [ 1872.804289] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.817811] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769425, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.821031] env[62510]: DEBUG oslo_vmware.api [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769424, 'name': ReconfigVM_Task, 'duration_secs': 0.215792} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.821276] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Updating instance '841460b0-d917-44ea-88c6-0e5a3022f658' progress to 33 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1872.961591] env[62510]: DEBUG nova.network.neutron [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Successfully updated port: 359f36f0-f995-4822-b3df-83b9a561be76 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1873.027803] env[62510]: DEBUG nova.network.neutron [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Successfully created port: b0fce605-e00b-4356-8005-b66dcb30663b {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1873.078492] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "72f8492b-304a-4451-ab40-4cdfe36b9e19" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1873.078753] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "72f8492b-304a-4451-ab40-4cdfe36b9e19" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1873.140963] env[62510]: DEBUG nova.compute.manager [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1873.147263] env[62510]: DEBUG oslo_concurrency.lockutils [req-43e31222-51c8-4b69-a27c-e5686d6ce5df req-e0af25d3-b622-49d3-9d04-fee7a0a682b6 service nova] Releasing lock "refresh_cache-0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1873.318020] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769425, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.327445] env[62510]: DEBUG nova.virt.hardware [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1873.327705] env[62510]: DEBUG nova.virt.hardware [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1873.327921] env[62510]: DEBUG nova.virt.hardware [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1873.328067] env[62510]: DEBUG nova.virt.hardware [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1873.328217] env[62510]: DEBUG nova.virt.hardware [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1873.328366] env[62510]: DEBUG nova.virt.hardware [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1873.328631] env[62510]: DEBUG nova.virt.hardware [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1873.328802] env[62510]: DEBUG nova.virt.hardware [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1873.328970] env[62510]: DEBUG nova.virt.hardware [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 
tempest-ServerActionsTestOtherB-1185268283-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1873.329242] env[62510]: DEBUG nova.virt.hardware [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1873.329321] env[62510]: DEBUG nova.virt.hardware [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1873.334967] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Reconfiguring VM instance instance-00000044 to detach disk 2000 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1873.335448] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f0ed3bda-ce64-442c-a644-38c7844b2255 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.361599] env[62510]: DEBUG oslo_vmware.api [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 1873.361599] env[62510]: value = "task-1769426" [ 1873.361599] env[62510]: _type = "Task" [ 1873.361599] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.370756] env[62510]: DEBUG oslo_vmware.api [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769426, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.464527] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquiring lock "refresh_cache-82dceacf-1898-4d86-b1c6-552a24ab565f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1873.464636] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquired lock "refresh_cache-82dceacf-1898-4d86-b1c6-552a24ab565f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1873.464742] env[62510]: DEBUG nova.network.neutron [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1873.551288] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3c97c34-e4fe-4d1a-8df6-5c1e96680842 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.563280] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98a86792-ff35-4c74-94fa-2f256342dfd9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.603899] env[62510]: DEBUG nova.compute.manager [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1873.605082] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6f672fb-59f4-4b4c-8802-172b68195ba9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.614555] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d295bac-0092-4e7d-bafd-56655da2523e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.630897] env[62510]: DEBUG nova.compute.provider_tree [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1873.818082] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769425, 'name': ReconfigVM_Task, 'duration_secs': 0.91036} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1873.818483] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Reconfigured VM instance instance-00000065 to attach disk [datastore1] 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c/5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1873.819339] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dd43df31-5e6f-4903-92fc-a6e14b8f596d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.828831] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for the task: (returnval){ [ 1873.828831] env[62510]: value = "task-1769427" [ 1873.828831] env[62510]: _type = "Task" [ 1873.828831] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.841206] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769427, 'name': Rename_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.872526] env[62510]: DEBUG oslo_vmware.api [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769426, 'name': ReconfigVM_Task, 'duration_secs': 0.366919} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1873.872829] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Reconfigured VM instance instance-00000044 to detach disk 2000 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1873.873730] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-124d58a7-94ed-452c-b076-4f58d7f40834 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.897096] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] 841460b0-d917-44ea-88c6-0e5a3022f658/841460b0-d917-44ea-88c6-0e5a3022f658.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1873.898459] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-24ee543b-b898-4132-8103-0584e209b6d8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.912037] env[62510]: DEBUG nova.compute.manager [req-e3f7e609-b49a-4192-aec4-54d4008aa51b req-937a2c65-a4ef-4c01-8471-1d5d3254df41 service nova] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Received event network-changed-359f36f0-f995-4822-b3df-83b9a561be76 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1873.912254] env[62510]: DEBUG nova.compute.manager [req-e3f7e609-b49a-4192-aec4-54d4008aa51b req-937a2c65-a4ef-4c01-8471-1d5d3254df41 service nova] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Refreshing instance network info cache due to event network-changed-359f36f0-f995-4822-b3df-83b9a561be76. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1873.912453] env[62510]: DEBUG oslo_concurrency.lockutils [req-e3f7e609-b49a-4192-aec4-54d4008aa51b req-937a2c65-a4ef-4c01-8471-1d5d3254df41 service nova] Acquiring lock "refresh_cache-82dceacf-1898-4d86-b1c6-552a24ab565f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1873.920025] env[62510]: DEBUG oslo_vmware.api [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 1873.920025] env[62510]: value = "task-1769428" [ 1873.920025] env[62510]: _type = "Task" [ 1873.920025] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.928326] env[62510]: DEBUG oslo_vmware.api [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769428, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.999550] env[62510]: DEBUG nova.network.neutron [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1874.126657] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1874.133888] env[62510]: DEBUG nova.scheduler.client.report [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1874.152507] env[62510]: DEBUG nova.compute.manager [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1874.182317] env[62510]: DEBUG nova.virt.hardware [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1874.182655] env[62510]: DEBUG nova.virt.hardware [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1874.182850] env[62510]: DEBUG nova.virt.hardware [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1874.183066] env[62510]: DEBUG nova.virt.hardware [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1874.183229] env[62510]: DEBUG nova.virt.hardware [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1874.183378] env[62510]: DEBUG nova.virt.hardware [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1874.183645] env[62510]: DEBUG nova.virt.hardware [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1874.183825] env[62510]: DEBUG nova.virt.hardware [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1874.184009] env[62510]: DEBUG nova.virt.hardware [None 
req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1874.184184] env[62510]: DEBUG nova.virt.hardware [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1874.184353] env[62510]: DEBUG nova.virt.hardware [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1874.185373] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a50459c-dd97-4402-8d3c-3c2f769f7967 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.191566] env[62510]: DEBUG nova.network.neutron [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Updating instance_info_cache with network_info: [{"id": "359f36f0-f995-4822-b3df-83b9a561be76", "address": "fa:16:3e:e2:d9:13", "network": {"id": "dd80546c-dbc1-461a-8b4a-342b8a63957b", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-174038375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "99bae3a9008a46349842b33ce6e41b25", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap359f36f0-f9", "ovs_interfaceid": "359f36f0-f995-4822-b3df-83b9a561be76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1874.196294] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2562bbe3-3cf4-4a3e-8e95-acf769d20dfa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.340904] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769427, 'name': Rename_Task, 'duration_secs': 0.344864} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.341268] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1874.341268] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b75f2d33-f0fb-4ac9-972d-dbb689890897 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.349697] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for the task: (returnval){ [ 1874.349697] env[62510]: value = "task-1769429" [ 1874.349697] env[62510]: _type = "Task" [ 1874.349697] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1874.361124] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769429, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.431090] env[62510]: DEBUG oslo_vmware.api [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769428, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.639804] env[62510]: DEBUG oslo_concurrency.lockutils [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.514s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1874.640512] env[62510]: DEBUG nova.compute.manager [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1874.643473] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.547s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1874.645212] env[62510]: INFO nova.compute.claims [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1874.696460] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Releasing lock "refresh_cache-82dceacf-1898-4d86-b1c6-552a24ab565f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1874.696808] env[62510]: DEBUG nova.compute.manager [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Instance network_info: |[{"id": "359f36f0-f995-4822-b3df-83b9a561be76", "address": "fa:16:3e:e2:d9:13", "network": {"id": "dd80546c-dbc1-461a-8b4a-342b8a63957b", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-174038375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "99bae3a9008a46349842b33ce6e41b25", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap359f36f0-f9", "ovs_interfaceid": "359f36f0-f995-4822-b3df-83b9a561be76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1874.698425] env[62510]: DEBUG oslo_concurrency.lockutils [req-e3f7e609-b49a-4192-aec4-54d4008aa51b req-937a2c65-a4ef-4c01-8471-1d5d3254df41 service nova] Acquired lock "refresh_cache-82dceacf-1898-4d86-b1c6-552a24ab565f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1874.698721] env[62510]: DEBUG nova.network.neutron [req-e3f7e609-b49a-4192-aec4-54d4008aa51b req-937a2c65-a4ef-4c01-8471-1d5d3254df41 service nova] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Refreshing network info cache for port 359f36f0-f995-4822-b3df-83b9a561be76 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1874.699882] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 
tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e2:d9:13', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2e614f8e-6b11-4b6b-a421-904bca6acd91', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '359f36f0-f995-4822-b3df-83b9a561be76', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1874.708422] env[62510]: DEBUG oslo.service.loopingcall [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1874.711500] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1874.712012] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0a703abd-8080-4556-9a0e-fbef3dc0d375 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.733374] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1874.733374] env[62510]: value = "task-1769430" [ 1874.733374] env[62510]: _type = "Task" [ 1874.733374] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1874.742896] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769430, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.854891] env[62510]: DEBUG nova.network.neutron [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Successfully updated port: b0fce605-e00b-4356-8005-b66dcb30663b {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1874.863374] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769429, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.932295] env[62510]: DEBUG oslo_vmware.api [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769428, 'name': ReconfigVM_Task, 'duration_secs': 0.732539} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.932598] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Reconfigured VM instance instance-00000044 to attach disk [datastore1] 841460b0-d917-44ea-88c6-0e5a3022f658/841460b0-d917-44ea-88c6-0e5a3022f658.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1874.932941] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Updating instance '841460b0-d917-44ea-88c6-0e5a3022f658' progress to 50 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1874.996916] env[62510]: DEBUG nova.network.neutron [req-e3f7e609-b49a-4192-aec4-54d4008aa51b req-937a2c65-a4ef-4c01-8471-1d5d3254df41 service nova] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Updated VIF entry in instance network info cache for port 359f36f0-f995-4822-b3df-83b9a561be76. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1874.996916] env[62510]: DEBUG nova.network.neutron [req-e3f7e609-b49a-4192-aec4-54d4008aa51b req-937a2c65-a4ef-4c01-8471-1d5d3254df41 service nova] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Updating instance_info_cache with network_info: [{"id": "359f36f0-f995-4822-b3df-83b9a561be76", "address": "fa:16:3e:e2:d9:13", "network": {"id": "dd80546c-dbc1-461a-8b4a-342b8a63957b", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-174038375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "99bae3a9008a46349842b33ce6e41b25", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap359f36f0-f9", "ovs_interfaceid": "359f36f0-f995-4822-b3df-83b9a561be76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1875.151130] env[62510]: DEBUG nova.compute.utils [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1875.154579] env[62510]: DEBUG nova.compute.manager [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Allocating IP information in the 
background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1875.154775] env[62510]: DEBUG nova.network.neutron [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1875.194521] env[62510]: DEBUG nova.policy [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3df98c323ad747e091a3a6b3470aa68a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e07dbff451a34b03b9250a44993b0a58', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1875.246178] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769430, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1875.357950] env[62510]: DEBUG oslo_concurrency.lockutils [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "refresh_cache-0c93a909-d08f-466c-bdef-a26fa35cd944" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1875.358343] env[62510]: DEBUG oslo_concurrency.lockutils [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquired lock "refresh_cache-0c93a909-d08f-466c-bdef-a26fa35cd944" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1875.358400] env[62510]: DEBUG nova.network.neutron [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1875.363972] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769429, 'name': PowerOnVM_Task, 'duration_secs': 0.910479} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1875.364657] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1875.364968] env[62510]: INFO nova.compute.manager [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Took 8.43 seconds to spawn the instance on the hypervisor. [ 1875.365172] env[62510]: DEBUG nova.compute.manager [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1875.366165] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-799ceb44-191d-4b28-b77c-5fd23109aef5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.439633] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-673af4c4-8c5a-40af-a093-f40f98e8f157 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.468363] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32247717-d3f7-4c31-b202-d61ab5d97754 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.490204] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Updating instance '841460b0-d917-44ea-88c6-0e5a3022f658' progress to 67 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1875.500172] env[62510]: DEBUG oslo_concurrency.lockutils [req-e3f7e609-b49a-4192-aec4-54d4008aa51b req-937a2c65-a4ef-4c01-8471-1d5d3254df41 service nova] Releasing lock "refresh_cache-82dceacf-1898-4d86-b1c6-552a24ab565f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1875.566455] env[62510]: DEBUG nova.network.neutron [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Successfully created port: f9e5bc34-1b3a-416a-bb15-ce81423ee2a8 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1875.657809] env[62510]: DEBUG nova.compute.manager [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Start building block device mappings for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1875.747783] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769430, 'name': CreateVM_Task, 'duration_secs': 0.682542} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1875.748145] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1875.748852] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1875.749028] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1875.749417] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1875.751963] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-282520ca-e3f1-4541-b286-fbcba0fad44f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.757304] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for the task: (returnval){ [ 1875.757304] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5292658b-64e0-75ed-e31d-1b4f28448863" [ 1875.757304] env[62510]: _type = "Task" [ 1875.757304] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1875.765818] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5292658b-64e0-75ed-e31d-1b4f28448863, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1875.892426] env[62510]: INFO nova.compute.manager [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Took 17.94 seconds to build instance. 
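The CreateVM_Task and SearchDatastore_Task entries above follow the same pattern: a task is submitted, then its progress is polled ("progress is 0%", "progress is 25%", ...) until it completes. The sketch below is a generic, self-contained illustration of that polling loop, not the actual oslo_vmware wait_for_task implementation; the helper names and the fake task are illustrative assumptions.

import time

def wait_for_task(poll_progress, interval=0.5, timeout=300.0):
    """Poll a task until it reports completion.

    poll_progress is a callable returning (state, progress). This mirrors the
    repeated "_poll_task ... progress is N%" log entries above, but it is an
    illustrative stand-in, not the oslo_vmware API.
    """
    deadline = time.monotonic() + timeout
    while True:
        state, progress = poll_progress()
        if state == 'success':
            return progress
        if state == 'error':
            raise RuntimeError('task failed')
        if time.monotonic() > deadline:
            raise TimeoutError('task did not complete in time')
        time.sleep(interval)

# Example: a fake task that completes after a few polls.
_steps = iter([('running', 25), ('running', 77), ('success', 100)])
print(wait_for_task(lambda: next(_steps), interval=0.01))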
[ 1875.927139] env[62510]: DEBUG nova.compute.manager [req-b7aa54b7-bfc0-4f03-a21c-56aaa8473dd7 req-a314f202-af45-43be-b358-66b75f8495b3 service nova] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Received event network-vif-plugged-b0fce605-e00b-4356-8005-b66dcb30663b {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1875.927243] env[62510]: DEBUG oslo_concurrency.lockutils [req-b7aa54b7-bfc0-4f03-a21c-56aaa8473dd7 req-a314f202-af45-43be-b358-66b75f8495b3 service nova] Acquiring lock "0c93a909-d08f-466c-bdef-a26fa35cd944-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1875.927447] env[62510]: DEBUG oslo_concurrency.lockutils [req-b7aa54b7-bfc0-4f03-a21c-56aaa8473dd7 req-a314f202-af45-43be-b358-66b75f8495b3 service nova] Lock "0c93a909-d08f-466c-bdef-a26fa35cd944-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1875.927646] env[62510]: DEBUG oslo_concurrency.lockutils [req-b7aa54b7-bfc0-4f03-a21c-56aaa8473dd7 req-a314f202-af45-43be-b358-66b75f8495b3 service nova] Lock "0c93a909-d08f-466c-bdef-a26fa35cd944-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1875.927826] env[62510]: DEBUG nova.compute.manager [req-b7aa54b7-bfc0-4f03-a21c-56aaa8473dd7 req-a314f202-af45-43be-b358-66b75f8495b3 service nova] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] No waiting events found dispatching network-vif-plugged-b0fce605-e00b-4356-8005-b66dcb30663b {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1875.927989] env[62510]: WARNING nova.compute.manager [req-b7aa54b7-bfc0-4f03-a21c-56aaa8473dd7 req-a314f202-af45-43be-b358-66b75f8495b3 service nova] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Received unexpected event network-vif-plugged-b0fce605-e00b-4356-8005-b66dcb30663b for instance with vm_state building and task_state spawning. [ 1875.928284] env[62510]: DEBUG nova.compute.manager [req-b7aa54b7-bfc0-4f03-a21c-56aaa8473dd7 req-a314f202-af45-43be-b358-66b75f8495b3 service nova] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Received event network-changed-b0fce605-e00b-4356-8005-b66dcb30663b {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1875.928478] env[62510]: DEBUG nova.compute.manager [req-b7aa54b7-bfc0-4f03-a21c-56aaa8473dd7 req-a314f202-af45-43be-b358-66b75f8495b3 service nova] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Refreshing instance network info cache due to event network-changed-b0fce605-e00b-4356-8005-b66dcb30663b. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1875.928678] env[62510]: DEBUG oslo_concurrency.lockutils [req-b7aa54b7-bfc0-4f03-a21c-56aaa8473dd7 req-a314f202-af45-43be-b358-66b75f8495b3 service nova] Acquiring lock "refresh_cache-0c93a909-d08f-466c-bdef-a26fa35cd944" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1875.937761] env[62510]: DEBUG nova.network.neutron [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1876.007758] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f114bd62-ab98-4bc4-80c2-f6b0a990266d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.023304] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc577fed-0cf3-4b0c-9bd6-f97cd091f6da {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.067739] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-297ffe23-2ba2-4033-9fb8-7b97a2b56794 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.080937] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-043ded50-3538-41bf-84c4-be9f96ff444a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.093182] env[62510]: DEBUG nova.compute.provider_tree [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1876.110523] env[62510]: DEBUG nova.network.neutron [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Port 5992dff8-0336-4d13-bbe8-2614b9dc96d5 binding to destination host cpu-1 is already ACTIVE {{(pid=62510) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1876.156146] env[62510]: DEBUG nova.network.neutron [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Updating instance_info_cache with network_info: [{"id": "b0fce605-e00b-4356-8005-b66dcb30663b", "address": "fa:16:3e:7e:84:91", "network": {"id": "de9186ec-ac4f-4ac0-8499-037f92e28197", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-164983974-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, 
"tenant_id": "f878b652f01c48139bfc6996e5e32f5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "191a5351-07d5-4138-b855-206f48fc4375", "external-id": "nsx-vlan-transportzone-939", "segmentation_id": 939, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0fce605-e0", "ovs_interfaceid": "b0fce605-e00b-4356-8005-b66dcb30663b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1876.268994] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5292658b-64e0-75ed-e31d-1b4f28448863, 'name': SearchDatastore_Task, 'duration_secs': 0.031526} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1876.269273] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1876.269514] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1876.269751] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1876.269963] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1876.270182] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1876.270448] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5e1fbdb4-fd93-4a53-98a4-0168b20cbd64 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.280898] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1876.281109] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1876.281835] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19210107-0d49-434b-856c-58b601f852c9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.287985] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for the task: (returnval){ [ 1876.287985] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]520a5031-e709-6cad-5c8c-01a8449d0a9e" [ 1876.287985] env[62510]: _type = "Task" [ 1876.287985] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1876.296590] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]520a5031-e709-6cad-5c8c-01a8449d0a9e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1876.395159] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Lock "5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.454s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1876.596141] env[62510]: DEBUG nova.scheduler.client.report [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1876.658845] env[62510]: DEBUG oslo_concurrency.lockutils [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Releasing lock "refresh_cache-0c93a909-d08f-466c-bdef-a26fa35cd944" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1876.659041] env[62510]: DEBUG nova.compute.manager [None 
req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Instance network_info: |[{"id": "b0fce605-e00b-4356-8005-b66dcb30663b", "address": "fa:16:3e:7e:84:91", "network": {"id": "de9186ec-ac4f-4ac0-8499-037f92e28197", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-164983974-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f878b652f01c48139bfc6996e5e32f5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "191a5351-07d5-4138-b855-206f48fc4375", "external-id": "nsx-vlan-transportzone-939", "segmentation_id": 939, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0fce605-e0", "ovs_interfaceid": "b0fce605-e00b-4356-8005-b66dcb30663b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1876.659438] env[62510]: DEBUG oslo_concurrency.lockutils [req-b7aa54b7-bfc0-4f03-a21c-56aaa8473dd7 req-a314f202-af45-43be-b358-66b75f8495b3 service nova] Acquired lock "refresh_cache-0c93a909-d08f-466c-bdef-a26fa35cd944" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1876.659722] env[62510]: DEBUG nova.network.neutron [req-b7aa54b7-bfc0-4f03-a21c-56aaa8473dd7 req-a314f202-af45-43be-b358-66b75f8495b3 service nova] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Refreshing network info cache for port b0fce605-e00b-4356-8005-b66dcb30663b {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1876.661105] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7e:84:91', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '191a5351-07d5-4138-b855-206f48fc4375', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b0fce605-e00b-4356-8005-b66dcb30663b', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1876.669407] env[62510]: DEBUG oslo.service.loopingcall [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1876.671976] env[62510]: DEBUG nova.compute.manager [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1876.673950] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1876.674636] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6887d2be-01ba-4e9f-b9cb-0280d45b32e8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.697111] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1876.697111] env[62510]: value = "task-1769431" [ 1876.697111] env[62510]: _type = "Task" [ 1876.697111] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1876.699394] env[62510]: DEBUG nova.virt.hardware [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1876.699642] env[62510]: DEBUG nova.virt.hardware [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1876.699800] env[62510]: DEBUG nova.virt.hardware [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1876.700017] env[62510]: DEBUG nova.virt.hardware [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1876.700182] env[62510]: DEBUG nova.virt.hardware [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1876.700330] env[62510]: DEBUG nova.virt.hardware [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 1876.700537] env[62510]: DEBUG nova.virt.hardware [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1876.700695] env[62510]: DEBUG nova.virt.hardware [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1876.700860] env[62510]: DEBUG nova.virt.hardware [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1876.701036] env[62510]: DEBUG nova.virt.hardware [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1876.701217] env[62510]: DEBUG nova.virt.hardware [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1876.702104] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3c41305-02ea-45f7-a1c1-c53e7c991c65 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.716729] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769431, 'name': CreateVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1876.720186] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b52e787-76a6-4dc8-b60e-ee7c9b408e20 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.799622] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]520a5031-e709-6cad-5c8c-01a8449d0a9e, 'name': SearchDatastore_Task, 'duration_secs': 0.017566} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1876.800456] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64a78020-eb19-4030-94f9-27b26fbc4775 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.807350] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for the task: (returnval){ [ 1876.807350] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5270901a-c436-d4b3-165d-4e9ccaa5eb4b" [ 1876.807350] env[62510]: _type = "Task" [ 1876.807350] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1876.817105] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5270901a-c436-d4b3-165d-4e9ccaa5eb4b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1877.101494] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.458s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1877.102282] env[62510]: DEBUG nova.compute.manager [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1877.105358] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.979s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1877.107044] env[62510]: INFO nova.compute.claims [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1877.136130] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "841460b0-d917-44ea-88c6-0e5a3022f658-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1877.136371] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "841460b0-d917-44ea-88c6-0e5a3022f658-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1877.136553] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "841460b0-d917-44ea-88c6-0e5a3022f658-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1877.215146] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769431, 'name': CreateVM_Task, 'duration_secs': 0.437995} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1877.215364] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1877.216169] env[62510]: DEBUG oslo_concurrency.lockutils [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1877.216362] env[62510]: DEBUG oslo_concurrency.lockutils [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1877.216693] env[62510]: DEBUG oslo_concurrency.lockutils [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1877.219280] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-32209c67-edef-4030-9a34-16bfb50597c8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.226170] env[62510]: DEBUG oslo_vmware.api [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1877.226170] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]527a2f65-1a86-b8ac-e099-021a04a43895" [ 1877.226170] env[62510]: _type = "Task" [ 1877.226170] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1877.235769] env[62510]: DEBUG oslo_vmware.api [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]527a2f65-1a86-b8ac-e099-021a04a43895, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1877.321452] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5270901a-c436-d4b3-165d-4e9ccaa5eb4b, 'name': SearchDatastore_Task, 'duration_secs': 0.010792} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1877.321717] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1877.321976] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 82dceacf-1898-4d86-b1c6-552a24ab565f/82dceacf-1898-4d86-b1c6-552a24ab565f.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1877.322256] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-495b0141-e323-4efd-937a-fba002dfa9a6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.330266] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for the task: (returnval){ [ 1877.330266] env[62510]: value = "task-1769432" [ 1877.330266] env[62510]: _type = "Task" [ 1877.330266] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1877.339314] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769432, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1877.532506] env[62510]: DEBUG nova.network.neutron [req-b7aa54b7-bfc0-4f03-a21c-56aaa8473dd7 req-a314f202-af45-43be-b358-66b75f8495b3 service nova] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Updated VIF entry in instance network info cache for port b0fce605-e00b-4356-8005-b66dcb30663b. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1877.532886] env[62510]: DEBUG nova.network.neutron [req-b7aa54b7-bfc0-4f03-a21c-56aaa8473dd7 req-a314f202-af45-43be-b358-66b75f8495b3 service nova] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Updating instance_info_cache with network_info: [{"id": "b0fce605-e00b-4356-8005-b66dcb30663b", "address": "fa:16:3e:7e:84:91", "network": {"id": "de9186ec-ac4f-4ac0-8499-037f92e28197", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-164983974-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f878b652f01c48139bfc6996e5e32f5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "191a5351-07d5-4138-b855-206f48fc4375", "external-id": "nsx-vlan-transportzone-939", "segmentation_id": 939, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0fce605-e0", "ovs_interfaceid": "b0fce605-e00b-4356-8005-b66dcb30663b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1877.611884] env[62510]: DEBUG nova.compute.utils [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1877.618059] env[62510]: DEBUG nova.compute.manager [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1877.618314] env[62510]: DEBUG nova.network.neutron [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1877.646713] env[62510]: DEBUG nova.network.neutron [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Successfully updated port: f9e5bc34-1b3a-416a-bb15-ce81423ee2a8 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1877.677780] env[62510]: DEBUG nova.policy [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '97a7f1ca55d549a3985e95b6bbc665f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '94a46473611d4b22be7c66c909d1b348', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1877.741705] env[62510]: DEBUG oslo_vmware.api [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]527a2f65-1a86-b8ac-e099-021a04a43895, 'name': SearchDatastore_Task, 'duration_secs': 0.013608} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1877.742157] env[62510]: DEBUG oslo_concurrency.lockutils [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1877.742411] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1877.742861] env[62510]: DEBUG oslo_concurrency.lockutils [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1877.743476] env[62510]: DEBUG oslo_concurrency.lockutils [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1877.743476] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1877.743726] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0b1bd495-0b30-40ad-9922-2539bd462ec1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.762087] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1877.762367] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1877.763454] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a90c9d0c-c9d2-4cb0-86f5-d5d8dddfd833 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.773317] env[62510]: DEBUG oslo_vmware.api [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1877.773317] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]521adf52-b6ba-1263-e307-0e6b224f38f1" [ 1877.773317] env[62510]: _type = "Task" [ 1877.773317] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1877.784968] env[62510]: DEBUG oslo_vmware.api [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]521adf52-b6ba-1263-e307-0e6b224f38f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1877.844243] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769432, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1877.976693] env[62510]: DEBUG nova.compute.manager [req-5f802f8c-fd5d-4469-a5f1-d9a2a09b5579 req-09764006-68d3-43c9-847b-5b1886d32fd4 service nova] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Received event network-vif-plugged-f9e5bc34-1b3a-416a-bb15-ce81423ee2a8 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1877.976928] env[62510]: DEBUG oslo_concurrency.lockutils [req-5f802f8c-fd5d-4469-a5f1-d9a2a09b5579 req-09764006-68d3-43c9-847b-5b1886d32fd4 service nova] Acquiring lock "5cae60b1-c0b1-4ff4-baf9-b8d1885614e8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1877.977151] env[62510]: DEBUG oslo_concurrency.lockutils [req-5f802f8c-fd5d-4469-a5f1-d9a2a09b5579 req-09764006-68d3-43c9-847b-5b1886d32fd4 service nova] Lock "5cae60b1-c0b1-4ff4-baf9-b8d1885614e8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1877.977321] env[62510]: DEBUG oslo_concurrency.lockutils [req-5f802f8c-fd5d-4469-a5f1-d9a2a09b5579 req-09764006-68d3-43c9-847b-5b1886d32fd4 service nova] Lock "5cae60b1-c0b1-4ff4-baf9-b8d1885614e8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1877.977540] env[62510]: DEBUG nova.compute.manager [req-5f802f8c-fd5d-4469-a5f1-d9a2a09b5579 req-09764006-68d3-43c9-847b-5b1886d32fd4 service nova] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] No waiting events found dispatching 
network-vif-plugged-f9e5bc34-1b3a-416a-bb15-ce81423ee2a8 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1877.977912] env[62510]: WARNING nova.compute.manager [req-5f802f8c-fd5d-4469-a5f1-d9a2a09b5579 req-09764006-68d3-43c9-847b-5b1886d32fd4 service nova] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Received unexpected event network-vif-plugged-f9e5bc34-1b3a-416a-bb15-ce81423ee2a8 for instance with vm_state building and task_state spawning. [ 1877.978102] env[62510]: DEBUG nova.compute.manager [req-5f802f8c-fd5d-4469-a5f1-d9a2a09b5579 req-09764006-68d3-43c9-847b-5b1886d32fd4 service nova] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Received event network-changed-f9e5bc34-1b3a-416a-bb15-ce81423ee2a8 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1877.978295] env[62510]: DEBUG nova.compute.manager [req-5f802f8c-fd5d-4469-a5f1-d9a2a09b5579 req-09764006-68d3-43c9-847b-5b1886d32fd4 service nova] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Refreshing instance network info cache due to event network-changed-f9e5bc34-1b3a-416a-bb15-ce81423ee2a8. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1877.978483] env[62510]: DEBUG oslo_concurrency.lockutils [req-5f802f8c-fd5d-4469-a5f1-d9a2a09b5579 req-09764006-68d3-43c9-847b-5b1886d32fd4 service nova] Acquiring lock "refresh_cache-5cae60b1-c0b1-4ff4-baf9-b8d1885614e8" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1877.978635] env[62510]: DEBUG oslo_concurrency.lockutils [req-5f802f8c-fd5d-4469-a5f1-d9a2a09b5579 req-09764006-68d3-43c9-847b-5b1886d32fd4 service nova] Acquired lock "refresh_cache-5cae60b1-c0b1-4ff4-baf9-b8d1885614e8" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1877.978903] env[62510]: DEBUG nova.network.neutron [req-5f802f8c-fd5d-4469-a5f1-d9a2a09b5579 req-09764006-68d3-43c9-847b-5b1886d32fd4 service nova] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Refreshing network info cache for port f9e5bc34-1b3a-416a-bb15-ce81423ee2a8 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1878.036880] env[62510]: DEBUG oslo_concurrency.lockutils [req-b7aa54b7-bfc0-4f03-a21c-56aaa8473dd7 req-a314f202-af45-43be-b358-66b75f8495b3 service nova] Releasing lock "refresh_cache-0c93a909-d08f-466c-bdef-a26fa35cd944" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1878.119493] env[62510]: DEBUG nova.compute.manager [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Start building block device mappings for instance. 
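The network-vif-plugged records above show the external-event dispatch pattern: the compute manager takes a per-instance "-events" lock, looks for a registered waiter for that event name, and logs the event as unexpected when none is found (the instance is still building/spawning). A minimal standalone sketch of that pattern follows; the `InstanceEvents` class and its method names here are illustrative stand-ins, not Nova's implementation.

```python
import threading
from collections import defaultdict


class InstanceEvents:
    """Toy version of the pop_instance_event pattern seen in the log: a
    waiter registers interest in an event name; if nothing is waiting when
    the event arrives, it is reported as unexpected. Not Nova's code."""

    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = defaultdict(list)   # event name -> list of Events

    def prepare_for(self, name):
        ready = threading.Event()
        with self._lock:
            self._waiters[name].append(ready)
        return ready

    def pop_instance_event(self, name):
        with self._lock:
            waiters = self._waiters.pop(name, [])
        if not waiters:
            print(f"No waiting events found dispatching {name}")
            print(f"Received unexpected event {name}")
            return False
        for ready in waiters:
            ready.set()
        return True


if __name__ == "__main__":
    events = InstanceEvents()
    events.pop_instance_event("network-vif-plugged-f9e5bc34")   # unexpected
    waiter = events.prepare_for("network-vif-plugged-f9e5bc34")
    events.pop_instance_event("network-vif-plugged-f9e5bc34")   # dispatched
    print("waiter signalled:", waiter.is_set())
```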
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1878.125715] env[62510]: DEBUG nova.network.neutron [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Successfully created port: 5a0b680e-7c0d-4008-91b8-216bc3b9da1e {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1878.152179] env[62510]: DEBUG oslo_concurrency.lockutils [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Acquiring lock "refresh_cache-5cae60b1-c0b1-4ff4-baf9-b8d1885614e8" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1878.199925] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "refresh_cache-841460b0-d917-44ea-88c6-0e5a3022f658" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1878.199925] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquired lock "refresh_cache-841460b0-d917-44ea-88c6-0e5a3022f658" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1878.199925] env[62510]: DEBUG nova.network.neutron [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1878.286079] env[62510]: DEBUG oslo_vmware.api [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]521adf52-b6ba-1263-e307-0e6b224f38f1, 'name': SearchDatastore_Task, 'duration_secs': 0.076642} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1878.287043] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ab17680-b1e1-4bbb-b7cb-8895ad921adc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.293400] env[62510]: DEBUG oslo_vmware.api [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1878.293400] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5261d3a1-55d8-1e56-35be-6657d366fa5a" [ 1878.293400] env[62510]: _type = "Task" [ 1878.293400] env[62510]: } to complete. 
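The repeated "Waiting for the task ... to complete" / "progress is N%" / "completed successfully" triples throughout this section come from polling vCenter task objects until they finish. Below is a self-contained sketch of such a poll loop; `FakeTask`, `fetch`-style polling, and `wait_for_task` as written here are illustrative only and do not reproduce the oslo.vmware API.

```python
import time


class FakeTask:
    """Hypothetical stand-in for a vCenter task handle (not the real API)."""

    def __init__(self, total_steps=4):
        self._step = 0
        self._total = total_steps

    def poll(self):
        # Each poll advances the fake task; a real client would read the
        # task's state/progress from the vCenter PropertyCollector instead.
        self._step = min(self._step + 1, self._total)
        progress = int(100 * self._step / self._total)
        state = "success" if self._step == self._total else "running"
        return state, progress


def wait_for_task(task, interval=0.5, timeout=60.0):
    """Poll a task until it reports success, mirroring the progress lines."""
    start = time.monotonic()
    while True:
        state, progress = task.poll()
        print(f"progress is {progress}%")
        if state == "success":
            return time.monotonic() - start
        if time.monotonic() - start > timeout:
            raise TimeoutError("task did not complete in time")
        time.sleep(interval)


if __name__ == "__main__":
    duration = wait_for_task(FakeTask(), interval=0.1)
    print(f"completed successfully in {duration:.3f}s")
```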
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1878.303090] env[62510]: DEBUG oslo_vmware.api [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5261d3a1-55d8-1e56-35be-6657d366fa5a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.343900] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769432, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.538916} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1878.344208] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 82dceacf-1898-4d86-b1c6-552a24ab565f/82dceacf-1898-4d86-b1c6-552a24ab565f.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1878.344432] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1878.344695] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-97600095-4095-4b65-a3e9-c6871834ad5d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.353535] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for the task: (returnval){ [ 1878.353535] env[62510]: value = "task-1769433" [ 1878.353535] env[62510]: _type = "Task" [ 1878.353535] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1878.369958] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769433, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.512174] env[62510]: DEBUG nova.network.neutron [req-5f802f8c-fd5d-4469-a5f1-d9a2a09b5579 req-09764006-68d3-43c9-847b-5b1886d32fd4 service nova] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Instance cache missing network info. 
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1878.558394] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bc563ae-ebea-4072-8718-b4122f7225ac {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.567035] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0898fcd3-26e7-4f08-a610-87a4f4586c8e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.603099] env[62510]: DEBUG nova.network.neutron [req-5f802f8c-fd5d-4469-a5f1-d9a2a09b5579 req-09764006-68d3-43c9-847b-5b1886d32fd4 service nova] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1878.605024] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c906fc6-0af3-4612-82c9-235a623576b8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.614588] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74330ba6-e999-4c4c-8f81-d5161de4bf2e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.633617] env[62510]: DEBUG nova.compute.provider_tree [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1878.805070] env[62510]: DEBUG oslo_vmware.api [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5261d3a1-55d8-1e56-35be-6657d366fa5a, 'name': SearchDatastore_Task, 'duration_secs': 0.011157} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1878.807470] env[62510]: DEBUG oslo_concurrency.lockutils [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1878.807796] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 0c93a909-d08f-466c-bdef-a26fa35cd944/0c93a909-d08f-466c-bdef-a26fa35cd944.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1878.808090] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f5662f80-e38c-4955-8af8-dda9c9d499c4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.816230] env[62510]: DEBUG oslo_vmware.api [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1878.816230] env[62510]: value = "task-1769434" [ 1878.816230] env[62510]: _type = "Task" [ 1878.816230] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1878.824970] env[62510]: DEBUG oslo_vmware.api [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769434, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.863138] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769433, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08744} completed successfully. 
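The Acquiring/Acquired/Releasing lines around "[datastore1] devstack-image-cache_base/...vmdk" show a named lock serializing access to the cached image while it is copied into the instance directory. The sketch below imitates that with an in-process lock registry; it is a toy, whereas the service uses oslo.concurrency's lockutils, which also reports the waited/held durations seen in the log.

```python
import threading
from collections import defaultdict
from contextlib import contextmanager

# Hypothetical in-process registry of named locks; not oslo.concurrency.
_locks = defaultdict(threading.Lock)
_registry_guard = threading.Lock()


@contextmanager
def named_lock(name):
    with _registry_guard:
        lock = _locks[name]
    print(f'Acquiring lock "{name}"')
    lock.acquire()
    print(f'Lock "{name}" acquired')
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" released')


if __name__ == "__main__":
    cache_vmdk = ("[datastore1] devstack-image-cache_base/"
                  "645af513-c243-4722-b631-714f21477ae6/"
                  "645af513-c243-4722-b631-714f21477ae6.vmdk")
    with named_lock(cache_vmdk):
        # Only one worker at a time copies from the cached image VMDK.
        pass
```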
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1878.865641] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1878.866492] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70d05750-cd52-4fde-9c76-f6355b293df4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.892054] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] 82dceacf-1898-4d86-b1c6-552a24ab565f/82dceacf-1898-4d86-b1c6-552a24ab565f.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1878.892296] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-67c668a5-203b-459f-99e5-5151f5e60893 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.917237] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for the task: (returnval){ [ 1878.917237] env[62510]: value = "task-1769435" [ 1878.917237] env[62510]: _type = "Task" [ 1878.917237] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1878.926163] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769435, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.109289] env[62510]: DEBUG oslo_concurrency.lockutils [req-5f802f8c-fd5d-4469-a5f1-d9a2a09b5579 req-09764006-68d3-43c9-847b-5b1886d32fd4 service nova] Releasing lock "refresh_cache-5cae60b1-c0b1-4ff4-baf9-b8d1885614e8" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1879.109775] env[62510]: DEBUG oslo_concurrency.lockutils [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Acquired lock "refresh_cache-5cae60b1-c0b1-4ff4-baf9-b8d1885614e8" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1879.109957] env[62510]: DEBUG nova.network.neutron [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1879.136983] env[62510]: DEBUG nova.compute.manager [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1879.140358] env[62510]: DEBUG nova.scheduler.client.report [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1879.180881] env[62510]: DEBUG nova.virt.hardware [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1879.181268] env[62510]: DEBUG nova.virt.hardware [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 
tempest-ServersTestJSON-938961669-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1879.181476] env[62510]: DEBUG nova.virt.hardware [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1879.181716] env[62510]: DEBUG nova.virt.hardware [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1879.181957] env[62510]: DEBUG nova.virt.hardware [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1879.182240] env[62510]: DEBUG nova.virt.hardware [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1879.182537] env[62510]: DEBUG nova.virt.hardware [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1879.182788] env[62510]: DEBUG nova.virt.hardware [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1879.183062] env[62510]: DEBUG nova.virt.hardware [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1879.183347] env[62510]: DEBUG nova.virt.hardware [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1879.183611] env[62510]: DEBUG nova.virt.hardware [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1879.184822] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-028da54e-1e36-4b52-91a9-e8416a319f0b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.195628] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe97e2ba-3c3f-4b1c-aea1-e177a5a06f27 {{(pid=62510) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.328793] env[62510]: DEBUG oslo_vmware.api [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769434, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.339971] env[62510]: DEBUG nova.network.neutron [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Updating instance_info_cache with network_info: [{"id": "5992dff8-0336-4d13-bbe8-2614b9dc96d5", "address": "fa:16:3e:47:48:b1", "network": {"id": "4c55d05c-607e-4972-898f-4aacefeddfdb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1391357384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bae4f0adee8c4c28add1849316448538", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dced2f3d-7fd3-4a42-836d-9f02dab4c949", "external-id": "nsx-vlan-transportzone-117", "segmentation_id": 117, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5992dff8-03", "ovs_interfaceid": "5992dff8-0336-4d13-bbe8-2614b9dc96d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1879.407316] env[62510]: DEBUG oslo_vmware.rw_handles [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52009522-d61c-fcb3-1687-ecd502d1487a/disk-0.vmdk. {{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1879.408367] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efe381be-ef99-46b0-8a3d-1d530f0429b8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.415673] env[62510]: DEBUG oslo_vmware.rw_handles [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52009522-d61c-fcb3-1687-ecd502d1487a/disk-0.vmdk is in state: ready. 
{{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1879.415872] env[62510]: ERROR oslo_vmware.rw_handles [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52009522-d61c-fcb3-1687-ecd502d1487a/disk-0.vmdk due to incomplete transfer. [ 1879.416151] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-af890339-801e-47b8-807f-09a7501ef233 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.426656] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769435, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.427834] env[62510]: DEBUG oslo_vmware.rw_handles [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52009522-d61c-fcb3-1687-ecd502d1487a/disk-0.vmdk. {{(pid=62510) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1879.428078] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Uploaded image da7c8e66-0047-4492-9c76-db7e729079e0 to the Glance image server {{(pid=62510) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1879.430421] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Destroying the VM {{(pid=62510) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1879.430730] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-b66f76b9-bf3e-4047-b929-d169bfc37712 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.437155] env[62510]: DEBUG oslo_vmware.api [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1879.437155] env[62510]: value = "task-1769436" [ 1879.437155] env[62510]: _type = "Task" [ 1879.437155] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1879.445978] env[62510]: DEBUG oslo_vmware.api [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769436, 'name': Destroy_Task} progress is 0%. 
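A few records above, nova.virt.hardware narrows flavor and image limits down to the single possible CPU topology for the 1-vCPU m1.nano flavor (cores=1, sockets=1, threads=1). The snippet below enumerates candidate topologies by brute force purely for illustration; the real code also applies the flavor/image preferences before sorting, and the function here is an assumption, not Nova's.

```python
from dataclasses import dataclass
from itertools import product


@dataclass(frozen=True)
class Topology:
    sockets: int
    cores: int
    threads: int


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Enumerate (sockets, cores, threads) combinations whose product is
    vcpus, within the given limits. Illustrative only."""
    found = []
    for s, c, t in product(range(1, min(vcpus, max_sockets) + 1),
                           range(1, min(vcpus, max_cores) + 1),
                           range(1, min(vcpus, max_threads) + 1)):
        if s * c * t == vcpus:
            found.append(Topology(s, c, t))
    return found


if __name__ == "__main__":
    # For a 1-vCPU flavor the only candidate is 1:1:1, as in the log.
    print(possible_topologies(1))
```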
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.641563] env[62510]: DEBUG nova.network.neutron [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1879.648582] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.543s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1879.649255] env[62510]: DEBUG nova.compute.manager [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1879.831023] env[62510]: DEBUG oslo_vmware.api [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769434, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.54273} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1879.831023] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 0c93a909-d08f-466c-bdef-a26fa35cd944/0c93a909-d08f-466c-bdef-a26fa35cd944.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1879.831023] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1879.831023] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9a706da6-db20-4672-ac01-828c6ea744b5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.839662] env[62510]: DEBUG oslo_vmware.api [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1879.839662] env[62510]: value = "task-1769437" [ 1879.839662] env[62510]: _type = "Task" [ 1879.839662] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1879.846156] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Releasing lock "refresh_cache-841460b0-d917-44ea-88c6-0e5a3022f658" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1879.859922] env[62510]: DEBUG oslo_vmware.api [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769437, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.861047] env[62510]: DEBUG nova.network.neutron [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Updating instance_info_cache with network_info: [{"id": "f9e5bc34-1b3a-416a-bb15-ce81423ee2a8", "address": "fa:16:3e:f5:ba:6a", "network": {"id": "e97f8dcb-c62c-4fe8-9465-30a087e3ea21", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1479713995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e07dbff451a34b03b9250a44993b0a58", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "97b68ed7-8461-4345-b064-96a1dde53a86", "external-id": "nsx-vlan-transportzone-140", "segmentation_id": 140, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9e5bc34-1b", "ovs_interfaceid": "f9e5bc34-1b3a-416a-bb15-ce81423ee2a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1879.929021] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769435, 'name': ReconfigVM_Task, 'duration_secs': 0.62711} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1879.929021] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Reconfigured VM instance instance-00000066 to attach disk [datastore1] 82dceacf-1898-4d86-b1c6-552a24ab565f/82dceacf-1898-4d86-b1c6-552a24ab565f.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1879.929649] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fbed3c83-fc7d-4926-adab-1a4661f9de68 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.935973] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for the task: (returnval){ [ 1879.935973] env[62510]: value = "task-1769438" [ 1879.935973] env[62510]: _type = "Task" [ 1879.935973] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1879.950064] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769438, 'name': Rename_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.953176] env[62510]: DEBUG oslo_vmware.api [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769436, 'name': Destroy_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.004495] env[62510]: DEBUG nova.compute.manager [req-298ecfc3-aef9-4fa2-bd81-3e6fd759282b req-53bb9fc4-0502-4c8e-948a-4f154f696560 service nova] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Received event network-vif-plugged-5a0b680e-7c0d-4008-91b8-216bc3b9da1e {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1880.004761] env[62510]: DEBUG oslo_concurrency.lockutils [req-298ecfc3-aef9-4fa2-bd81-3e6fd759282b req-53bb9fc4-0502-4c8e-948a-4f154f696560 service nova] Acquiring lock "c2be17de-175a-401f-8c53-f785aeecfff4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1880.004927] env[62510]: DEBUG oslo_concurrency.lockutils [req-298ecfc3-aef9-4fa2-bd81-3e6fd759282b req-53bb9fc4-0502-4c8e-948a-4f154f696560 service nova] Lock "c2be17de-175a-401f-8c53-f785aeecfff4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1880.005127] env[62510]: DEBUG oslo_concurrency.lockutils [req-298ecfc3-aef9-4fa2-bd81-3e6fd759282b req-53bb9fc4-0502-4c8e-948a-4f154f696560 service nova] Lock "c2be17de-175a-401f-8c53-f785aeecfff4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1880.005304] env[62510]: DEBUG nova.compute.manager [req-298ecfc3-aef9-4fa2-bd81-3e6fd759282b req-53bb9fc4-0502-4c8e-948a-4f154f696560 service nova] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] No waiting events found dispatching network-vif-plugged-5a0b680e-7c0d-4008-91b8-216bc3b9da1e {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1880.005533] env[62510]: WARNING nova.compute.manager [req-298ecfc3-aef9-4fa2-bd81-3e6fd759282b req-53bb9fc4-0502-4c8e-948a-4f154f696560 service nova] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Received unexpected event network-vif-plugged-5a0b680e-7c0d-4008-91b8-216bc3b9da1e for instance with vm_state building and task_state spawning. [ 1880.037313] env[62510]: DEBUG nova.network.neutron [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Successfully updated port: 5a0b680e-7c0d-4008-91b8-216bc3b9da1e {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1880.160500] env[62510]: DEBUG nova.compute.utils [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1880.162346] env[62510]: DEBUG nova.compute.manager [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1880.162529] env[62510]: DEBUG nova.network.neutron [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1880.233014] env[62510]: DEBUG nova.policy [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '112bb5174a71476f9aaa66e917fc135a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cca414b18f8d431786c155d359f1325d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1880.350391] env[62510]: DEBUG oslo_vmware.api [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769437, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094451} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1880.352721] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1880.352721] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79b39ee5-52a5-4ed0-bc46-870b1bc798e7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.371022] env[62510]: DEBUG oslo_concurrency.lockutils [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Releasing lock "refresh_cache-5cae60b1-c0b1-4ff4-baf9-b8d1885614e8" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1880.373298] env[62510]: DEBUG nova.compute.manager [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Instance network_info: |[{"id": "f9e5bc34-1b3a-416a-bb15-ce81423ee2a8", "address": "fa:16:3e:f5:ba:6a", "network": {"id": "e97f8dcb-c62c-4fe8-9465-30a087e3ea21", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1479713995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e07dbff451a34b03b9250a44993b0a58", "mtu": 8950, "physical_network": 
"default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "97b68ed7-8461-4345-b064-96a1dde53a86", "external-id": "nsx-vlan-transportzone-140", "segmentation_id": 140, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9e5bc34-1b", "ovs_interfaceid": "f9e5bc34-1b3a-416a-bb15-ce81423ee2a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1880.382133] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] 0c93a909-d08f-466c-bdef-a26fa35cd944/0c93a909-d08f-466c-bdef-a26fa35cd944.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1880.383137] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f5:ba:6a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '97b68ed7-8461-4345-b064-96a1dde53a86', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f9e5bc34-1b3a-416a-bb15-ce81423ee2a8', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1880.390556] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Creating folder: Project (e07dbff451a34b03b9250a44993b0a58). Parent ref: group-v367197. 
{{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1880.390861] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-079f7c93-0957-4553-8715-7c877ced2812 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.406605] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-994dc6ed-7dc8-41c7-8e21-2f54c964c58a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.408884] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f0ff3ac-672c-4a0f-9e75-f699a77149a3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.433952] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2abd6364-7898-487d-8e0f-9fa34794d98f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.434465] env[62510]: DEBUG oslo_vmware.api [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1880.434465] env[62510]: value = "task-1769440" [ 1880.434465] env[62510]: _type = "Task" [ 1880.434465] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.435445] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Created folder: Project (e07dbff451a34b03b9250a44993b0a58) in parent group-v367197. [ 1880.435445] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Creating folder: Instances. Parent ref: group-v367470. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1880.435893] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d1458ffb-c338-47b4-a985-85cf5051a2b0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.447521] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Updating instance '841460b0-d917-44ea-88c6-0e5a3022f658' progress to 83 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1880.456721] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Created folder: Instances in parent group-v367470. [ 1880.456993] env[62510]: DEBUG oslo.service.loopingcall [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1880.457651] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1880.457819] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-acb6b202-0717-4249-b0c1-d193df40eea7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.483802] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769438, 'name': Rename_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.483802] env[62510]: DEBUG oslo_vmware.api [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769440, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.483802] env[62510]: DEBUG oslo_vmware.api [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769436, 'name': Destroy_Task} progress is 33%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.488918] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1880.488918] env[62510]: value = "task-1769442" [ 1880.488918] env[62510]: _type = "Task" [ 1880.488918] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.496752] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769442, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.540745] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "refresh_cache-c2be17de-175a-401f-8c53-f785aeecfff4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1880.540805] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquired lock "refresh_cache-c2be17de-175a-401f-8c53-f785aeecfff4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1880.540946] env[62510]: DEBUG nova.network.neutron [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1880.667093] env[62510]: DEBUG nova.compute.manager [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1880.786197] env[62510]: DEBUG nova.network.neutron [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Successfully created port: 348ebdec-3667-4eea-b76e-5356163db2f9 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1880.953102] env[62510]: DEBUG oslo_vmware.api [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769440, 'name': ReconfigVM_Task, 'duration_secs': 0.448027} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1880.956997] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Reconfigured VM instance instance-00000067 to attach disk [datastore1] 0c93a909-d08f-466c-bdef-a26fa35cd944/0c93a909-d08f-466c-bdef-a26fa35cd944.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1880.956997] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769438, 'name': Rename_Task, 'duration_secs': 0.541249} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1880.958492] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5e30ea2c-d1ad-4560-8eb0-61aaa6c5992b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Updating instance '841460b0-d917-44ea-88c6-0e5a3022f658' progress to 100 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1880.961945] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-75aab762-cb81-4dae-9a2b-48cff09c7749 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.963734] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1880.967163] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-33ff739f-7ccd-4520-95bc-45067dc81d0d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.968754] env[62510]: DEBUG oslo_vmware.api [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769436, 'name': Destroy_Task, 'duration_secs': 1.282433} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1880.969427] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Destroyed the VM [ 1880.969669] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Deleting Snapshot of the VM instance {{(pid=62510) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1880.970215] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-0fe35e24-e30f-4078-9417-fd07abb84d2b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.974685] env[62510]: DEBUG oslo_vmware.api [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1880.974685] env[62510]: value = "task-1769443" [ 1880.974685] env[62510]: _type = "Task" [ 1880.974685] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.975960] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for the task: (returnval){ [ 1880.975960] env[62510]: value = "task-1769444" [ 1880.975960] env[62510]: _type = "Task" [ 1880.975960] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.983393] env[62510]: DEBUG oslo_vmware.api [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1880.983393] env[62510]: value = "task-1769445" [ 1880.983393] env[62510]: _type = "Task" [ 1880.983393] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.997663] env[62510]: DEBUG oslo_vmware.api [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769445, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.006944] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769444, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.007294] env[62510]: DEBUG oslo_vmware.api [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769443, 'name': Rename_Task} progress is 10%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.007524] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769442, 'name': CreateVM_Task, 'duration_secs': 0.479135} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.007757] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1881.008498] env[62510]: DEBUG oslo_concurrency.lockutils [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1881.008932] env[62510]: DEBUG oslo_concurrency.lockutils [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1881.009167] env[62510]: DEBUG oslo_concurrency.lockutils [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1881.009496] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a313671-1260-45e8-ad65-23b6c8d7a254 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.016192] env[62510]: DEBUG oslo_vmware.api [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Waiting for the task: (returnval){ [ 1881.016192] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52e6495f-c68c-4d4e-f04f-bed77504b5f0" [ 1881.016192] env[62510]: _type = "Task" [ 1881.016192] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.035960] env[62510]: DEBUG oslo_vmware.api [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52e6495f-c68c-4d4e-f04f-bed77504b5f0, 'name': SearchDatastore_Task, 'duration_secs': 0.014163} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.036480] env[62510]: DEBUG oslo_concurrency.lockutils [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1881.036805] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1881.037141] env[62510]: DEBUG oslo_concurrency.lockutils [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1881.037383] env[62510]: DEBUG oslo_concurrency.lockutils [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1881.037649] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1881.038445] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eea03867-da4f-4d11-9fbd-c1178147a7ea {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.055493] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1881.055724] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1881.056503] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69c1d992-6fdd-49c6-8624-86ad65472af2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.066026] env[62510]: DEBUG oslo_vmware.api [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Waiting for the task: (returnval){ [ 1881.066026] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5255391b-8352-bddd-f35f-1f0cc9304a51" [ 1881.066026] env[62510]: _type = "Task" [ 1881.066026] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.073237] env[62510]: DEBUG oslo_vmware.api [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5255391b-8352-bddd-f35f-1f0cc9304a51, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.114161] env[62510]: DEBUG nova.network.neutron [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1881.312685] env[62510]: DEBUG nova.network.neutron [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Updating instance_info_cache with network_info: [{"id": "5a0b680e-7c0d-4008-91b8-216bc3b9da1e", "address": "fa:16:3e:94:bf:14", "network": {"id": "22bd7136-e6e5-445f-8cd0-6cfe0341410c", "bridge": "br-int", "label": "tempest-ServersTestJSON-2034430291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "94a46473611d4b22be7c66c909d1b348", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a0b680e-7c", "ovs_interfaceid": "5a0b680e-7c0d-4008-91b8-216bc3b9da1e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1881.497678] env[62510]: DEBUG oslo_vmware.api [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769443, 'name': Rename_Task, 'duration_secs': 0.157228} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.498169] env[62510]: DEBUG oslo_vmware.api [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769444, 'name': PowerOnVM_Task, 'duration_secs': 0.498179} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.499100] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1881.500222] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1881.500684] env[62510]: INFO nova.compute.manager [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Took 9.71 seconds to spawn the instance on the hypervisor. [ 1881.501233] env[62510]: DEBUG nova.compute.manager [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1881.508282] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fb472edf-1e03-40c6-8015-ba60cc56e8f7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.508617] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eb94331-e1a6-4c8d-bda8-5670fff4b846 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.511315] env[62510]: DEBUG oslo_vmware.api [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769445, 'name': RemoveSnapshot_Task, 'duration_secs': 0.377323} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.511817] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Deleted Snapshot of the VM instance {{(pid=62510) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1881.512100] env[62510]: DEBUG nova.compute.manager [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1881.513376] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d1af8f9-7e99-40ff-84b0-7fadb308dafb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.521116] env[62510]: DEBUG oslo_vmware.api [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1881.521116] env[62510]: value = "task-1769446" [ 1881.521116] env[62510]: _type = "Task" [ 1881.521116] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.533137] env[62510]: DEBUG oslo_vmware.api [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769446, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.575253] env[62510]: DEBUG oslo_vmware.api [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5255391b-8352-bddd-f35f-1f0cc9304a51, 'name': SearchDatastore_Task, 'duration_secs': 0.010842} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.575639] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b00ccb0-5ac4-4f08-bdee-3375524b94c0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.582487] env[62510]: DEBUG oslo_vmware.api [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Waiting for the task: (returnval){ [ 1881.582487] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52d9cd9c-b38d-7cde-3d73-0369aada814e" [ 1881.582487] env[62510]: _type = "Task" [ 1881.582487] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.592699] env[62510]: DEBUG oslo_vmware.api [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52d9cd9c-b38d-7cde-3d73-0369aada814e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.680166] env[62510]: DEBUG nova.compute.manager [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1881.701876] env[62510]: DEBUG nova.virt.hardware [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1881.702237] env[62510]: DEBUG nova.virt.hardware [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1881.702406] env[62510]: DEBUG nova.virt.hardware [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1881.702585] env[62510]: DEBUG nova.virt.hardware [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1881.702741] env[62510]: DEBUG nova.virt.hardware [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1881.702877] env[62510]: DEBUG nova.virt.hardware [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1881.705795] env[62510]: DEBUG nova.virt.hardware [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1881.705795] env[62510]: DEBUG nova.virt.hardware [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1881.705795] env[62510]: DEBUG nova.virt.hardware [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1881.705795] env[62510]: DEBUG nova.virt.hardware [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1881.705795] env[62510]: DEBUG nova.virt.hardware [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1881.705795] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-923b0530-cc7d-4f58-933f-96e2562b9007 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.713682] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08336636-5461-4787-a172-018a4f5c24fd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.816701] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Releasing lock "refresh_cache-c2be17de-175a-401f-8c53-f785aeecfff4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1881.817437] env[62510]: DEBUG nova.compute.manager [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Instance network_info: |[{"id": "5a0b680e-7c0d-4008-91b8-216bc3b9da1e", "address": "fa:16:3e:94:bf:14", "network": {"id": "22bd7136-e6e5-445f-8cd0-6cfe0341410c", "bridge": "br-int", "label": "tempest-ServersTestJSON-2034430291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "94a46473611d4b22be7c66c909d1b348", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a0b680e-7c", "ovs_interfaceid": "5a0b680e-7c0d-4008-91b8-216bc3b9da1e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1881.817573] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:94:bf:14', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89ef02af-c508-432f-ae29-3a219701d584', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5a0b680e-7c0d-4008-91b8-216bc3b9da1e', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1881.826031] env[62510]: DEBUG oslo.service.loopingcall [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1881.826625] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1881.826857] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b0614f7f-ef8c-4e45-a9ef-58f6811f5367 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.848018] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1881.848018] env[62510]: value = "task-1769447" [ 1881.848018] env[62510]: _type = "Task" [ 1881.848018] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.859781] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769447, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.036799] env[62510]: INFO nova.compute.manager [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Shelve offloading [ 1882.038112] env[62510]: INFO nova.compute.manager [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Took 24.04 seconds to build instance. 
[ 1882.045227] env[62510]: DEBUG oslo_vmware.api [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769446, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.094328] env[62510]: DEBUG oslo_vmware.api [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52d9cd9c-b38d-7cde-3d73-0369aada814e, 'name': SearchDatastore_Task, 'duration_secs': 0.011233} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1882.094674] env[62510]: DEBUG oslo_concurrency.lockutils [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1882.094970] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8/5cae60b1-c0b1-4ff4-baf9-b8d1885614e8.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1882.095259] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-be6e9df6-104b-488d-9ad8-103b01254ea0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.103668] env[62510]: DEBUG oslo_vmware.api [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Waiting for the task: (returnval){ [ 1882.103668] env[62510]: value = "task-1769448" [ 1882.103668] env[62510]: _type = "Task" [ 1882.103668] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1882.116419] env[62510]: DEBUG oslo_vmware.api [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Task: {'id': task-1769448, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.187920] env[62510]: DEBUG nova.compute.manager [req-37b2fbff-7c53-46c9-9c63-38e4cd748049 req-13cecdec-8581-4c21-a794-141c094aef77 service nova] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Received event network-changed-5a0b680e-7c0d-4008-91b8-216bc3b9da1e {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1882.188202] env[62510]: DEBUG nova.compute.manager [req-37b2fbff-7c53-46c9-9c63-38e4cd748049 req-13cecdec-8581-4c21-a794-141c094aef77 service nova] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Refreshing instance network info cache due to event network-changed-5a0b680e-7c0d-4008-91b8-216bc3b9da1e. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1882.188415] env[62510]: DEBUG oslo_concurrency.lockutils [req-37b2fbff-7c53-46c9-9c63-38e4cd748049 req-13cecdec-8581-4c21-a794-141c094aef77 service nova] Acquiring lock "refresh_cache-c2be17de-175a-401f-8c53-f785aeecfff4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1882.188570] env[62510]: DEBUG oslo_concurrency.lockutils [req-37b2fbff-7c53-46c9-9c63-38e4cd748049 req-13cecdec-8581-4c21-a794-141c094aef77 service nova] Acquired lock "refresh_cache-c2be17de-175a-401f-8c53-f785aeecfff4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1882.188714] env[62510]: DEBUG nova.network.neutron [req-37b2fbff-7c53-46c9-9c63-38e4cd748049 req-13cecdec-8581-4c21-a794-141c094aef77 service nova] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Refreshing network info cache for port 5a0b680e-7c0d-4008-91b8-216bc3b9da1e {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1882.359848] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769447, 'name': CreateVM_Task, 'duration_secs': 0.401891} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1882.360060] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1882.360768] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1882.360927] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1882.361284] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1882.361554] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3101c2f2-41c1-4d4b-97ea-e5745843b54c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.368410] env[62510]: DEBUG oslo_vmware.api [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1882.368410] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]522dd8ab-6aaf-a889-ebe8-d373bb7b2b47" [ 1882.368410] env[62510]: _type = "Task" [ 1882.368410] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1882.379059] env[62510]: DEBUG oslo_vmware.api [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]522dd8ab-6aaf-a889-ebe8-d373bb7b2b47, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.541715] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a7228407-5f68-41d6-9716-7fd12e6e4c53 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Lock "82dceacf-1898-4d86-b1c6-552a24ab565f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.556s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1882.542134] env[62510]: DEBUG oslo_vmware.api [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769446, 'name': PowerOnVM_Task, 'duration_secs': 0.730953} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1882.542269] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1882.542468] env[62510]: INFO nova.compute.manager [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Took 8.39 seconds to spawn the instance on the hypervisor. [ 1882.542648] env[62510]: DEBUG nova.compute.manager [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1882.543591] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f302980-6f24-4313-80ae-c8c5412b54fa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.546871] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1882.547201] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-003a76c1-564e-49b2-8bc8-327fea61f716 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.566522] env[62510]: DEBUG oslo_vmware.api [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1882.566522] env[62510]: value = "task-1769449" [ 1882.566522] env[62510]: _type = "Task" [ 1882.566522] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1882.581142] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] VM already powered off {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1882.582056] env[62510]: DEBUG nova.compute.manager [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1882.583378] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b184bf2a-3091-4bf5-a91d-8efeb98a98eb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.596549] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquiring lock "refresh_cache-cf4160a8-1160-45fc-b9e5-e9526b6c1506" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1882.596549] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquired lock "refresh_cache-cf4160a8-1160-45fc-b9e5-e9526b6c1506" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1882.596549] env[62510]: DEBUG nova.network.neutron [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1882.613107] env[62510]: DEBUG nova.compute.manager [req-53f275f8-b9e7-4310-97dd-0519d5367403 req-143210a7-2577-423f-8bf2-4771e36bd841 service nova] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Received event network-vif-plugged-348ebdec-3667-4eea-b76e-5356163db2f9 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1882.613307] env[62510]: DEBUG oslo_concurrency.lockutils [req-53f275f8-b9e7-4310-97dd-0519d5367403 req-143210a7-2577-423f-8bf2-4771e36bd841 service nova] Acquiring lock "72f8492b-304a-4451-ab40-4cdfe36b9e19-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1882.613603] env[62510]: DEBUG oslo_concurrency.lockutils [req-53f275f8-b9e7-4310-97dd-0519d5367403 req-143210a7-2577-423f-8bf2-4771e36bd841 service nova] Lock "72f8492b-304a-4451-ab40-4cdfe36b9e19-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1882.613734] env[62510]: DEBUG oslo_concurrency.lockutils [req-53f275f8-b9e7-4310-97dd-0519d5367403 
req-143210a7-2577-423f-8bf2-4771e36bd841 service nova] Lock "72f8492b-304a-4451-ab40-4cdfe36b9e19-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1882.613925] env[62510]: DEBUG nova.compute.manager [req-53f275f8-b9e7-4310-97dd-0519d5367403 req-143210a7-2577-423f-8bf2-4771e36bd841 service nova] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] No waiting events found dispatching network-vif-plugged-348ebdec-3667-4eea-b76e-5356163db2f9 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1882.614196] env[62510]: WARNING nova.compute.manager [req-53f275f8-b9e7-4310-97dd-0519d5367403 req-143210a7-2577-423f-8bf2-4771e36bd841 service nova] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Received unexpected event network-vif-plugged-348ebdec-3667-4eea-b76e-5356163db2f9 for instance with vm_state building and task_state spawning. [ 1882.619808] env[62510]: DEBUG oslo_vmware.api [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Task: {'id': task-1769448, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.645829] env[62510]: DEBUG oslo_concurrency.lockutils [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "9956e5d2-edda-47af-a3df-743ebed1154b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1882.645829] env[62510]: DEBUG oslo_concurrency.lockutils [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "9956e5d2-edda-47af-a3df-743ebed1154b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1882.879176] env[62510]: DEBUG oslo_vmware.api [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]522dd8ab-6aaf-a889-ebe8-d373bb7b2b47, 'name': SearchDatastore_Task, 'duration_secs': 0.062842} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1882.879487] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1882.879721] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1882.879955] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1882.880176] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1882.880369] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1882.880627] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e742859d-40d5-4bd8-a6a6-09606947d113 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.889840] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1882.890045] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1882.890757] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e67e88a0-fe7e-4eaf-8258-361a314bb670 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.896608] env[62510]: DEBUG oslo_vmware.api [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1882.896608] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52cc6d0e-f3e5-bc76-dbdd-d5dcd8d6f6e1" [ 1882.896608] env[62510]: _type = "Task" [ 1882.896608] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1882.906844] env[62510]: DEBUG oslo_vmware.api [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52cc6d0e-f3e5-bc76-dbdd-d5dcd8d6f6e1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.928979] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6b9f057d-9ef6-4ad9-a0a9-a4f712e8260b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "841460b0-d917-44ea-88c6-0e5a3022f658" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1882.929232] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6b9f057d-9ef6-4ad9-a0a9-a4f712e8260b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "841460b0-d917-44ea-88c6-0e5a3022f658" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1882.929417] env[62510]: DEBUG nova.compute.manager [None req-6b9f057d-9ef6-4ad9-a0a9-a4f712e8260b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Going to confirm migration 5 {{(pid=62510) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5157}} [ 1883.038494] env[62510]: DEBUG nova.network.neutron [req-37b2fbff-7c53-46c9-9c63-38e4cd748049 req-13cecdec-8581-4c21-a794-141c094aef77 service nova] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Updated VIF entry in instance network info cache for port 5a0b680e-7c0d-4008-91b8-216bc3b9da1e. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1883.039057] env[62510]: DEBUG nova.network.neutron [req-37b2fbff-7c53-46c9-9c63-38e4cd748049 req-13cecdec-8581-4c21-a794-141c094aef77 service nova] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Updating instance_info_cache with network_info: [{"id": "5a0b680e-7c0d-4008-91b8-216bc3b9da1e", "address": "fa:16:3e:94:bf:14", "network": {"id": "22bd7136-e6e5-445f-8cd0-6cfe0341410c", "bridge": "br-int", "label": "tempest-ServersTestJSON-2034430291-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "94a46473611d4b22be7c66c909d1b348", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89ef02af-c508-432f-ae29-3a219701d584", "external-id": "nsx-vlan-transportzone-313", "segmentation_id": 313, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a0b680e-7c", "ovs_interfaceid": "5a0b680e-7c0d-4008-91b8-216bc3b9da1e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1883.060582] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7fbe06b1-fc61-4348-b608-dc3f70ea944c tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquiring lock "5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1883.061062] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7fbe06b1-fc61-4348-b608-dc3f70ea944c tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Lock "5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1883.061383] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7fbe06b1-fc61-4348-b608-dc3f70ea944c tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquiring lock "5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1883.061664] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7fbe06b1-fc61-4348-b608-dc3f70ea944c tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Lock "5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1883.061916] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7fbe06b1-fc61-4348-b608-dc3f70ea944c 
tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Lock "5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1883.073950] env[62510]: INFO nova.compute.manager [None req-7fbe06b1-fc61-4348-b608-dc3f70ea944c tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Terminating instance [ 1883.079647] env[62510]: INFO nova.compute.manager [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Took 23.79 seconds to build instance. [ 1883.115589] env[62510]: DEBUG oslo_vmware.api [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Task: {'id': task-1769448, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.523366} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1883.115870] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8/5cae60b1-c0b1-4ff4-baf9-b8d1885614e8.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1883.116106] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1883.116370] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d05a2970-8888-440e-bb17-bbfdc78fc030 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.125955] env[62510]: DEBUG oslo_vmware.api [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Waiting for the task: (returnval){ [ 1883.125955] env[62510]: value = "task-1769450" [ 1883.125955] env[62510]: _type = "Task" [ 1883.125955] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.135675] env[62510]: DEBUG oslo_vmware.api [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Task: {'id': task-1769450, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.149753] env[62510]: DEBUG nova.compute.manager [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1883.153368] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquiring lock "82dceacf-1898-4d86-b1c6-552a24ab565f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1883.153812] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Lock "82dceacf-1898-4d86-b1c6-552a24ab565f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1883.153812] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquiring lock "82dceacf-1898-4d86-b1c6-552a24ab565f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1883.153999] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Lock "82dceacf-1898-4d86-b1c6-552a24ab565f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1883.154193] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Lock "82dceacf-1898-4d86-b1c6-552a24ab565f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1883.156023] env[62510]: INFO nova.compute.manager [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Terminating instance [ 1883.253901] env[62510]: DEBUG nova.network.neutron [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Successfully updated port: 348ebdec-3667-4eea-b76e-5356163db2f9 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1883.411060] env[62510]: DEBUG oslo_vmware.api [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 
tempest-ServersTestJSON-938961669-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52cc6d0e-f3e5-bc76-dbdd-d5dcd8d6f6e1, 'name': SearchDatastore_Task, 'duration_secs': 0.011041} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1883.411884] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff0cdecc-e615-47b6-9ae3-88c8a9d553f0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.417796] env[62510]: DEBUG oslo_vmware.api [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1883.417796] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52bae601-f4bf-c0bd-7df7-262024420c09" [ 1883.417796] env[62510]: _type = "Task" [ 1883.417796] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.426688] env[62510]: DEBUG oslo_vmware.api [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52bae601-f4bf-c0bd-7df7-262024420c09, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.459177] env[62510]: DEBUG nova.objects.instance [None req-64606111-bfb5-4200-afe5-adef62850620 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Lazy-loading 'flavor' on Instance uuid 01204162-bf8e-46e0-bcf4-00df9ed7e7ce {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1883.501816] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6b9f057d-9ef6-4ad9-a0a9-a4f712e8260b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "refresh_cache-841460b0-d917-44ea-88c6-0e5a3022f658" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1883.501816] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6b9f057d-9ef6-4ad9-a0a9-a4f712e8260b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquired lock "refresh_cache-841460b0-d917-44ea-88c6-0e5a3022f658" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1883.501816] env[62510]: DEBUG nova.network.neutron [None req-6b9f057d-9ef6-4ad9-a0a9-a4f712e8260b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1883.501816] env[62510]: DEBUG nova.objects.instance [None req-6b9f057d-9ef6-4ad9-a0a9-a4f712e8260b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lazy-loading 'info_cache' on Instance uuid 841460b0-d917-44ea-88c6-0e5a3022f658 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1883.517870] env[62510]: DEBUG nova.network.neutron [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 
tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Updating instance_info_cache with network_info: [{"id": "022a0379-8a0f-412f-a55a-f8fcaf1102f3", "address": "fa:16:3e:fc:f0:87", "network": {"id": "3958d418-1b64-4598-975c-02b13c976ce5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1692593298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3109fa7889c64dfda2117d4cd58aa528", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ba56987-9dc3-4c76-a4e2-942b05355bdb", "external-id": "nsx-vlan-transportzone-698", "segmentation_id": 698, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap022a0379-8a", "ovs_interfaceid": "022a0379-8a0f-412f-a55a-f8fcaf1102f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1883.542689] env[62510]: DEBUG oslo_concurrency.lockutils [req-37b2fbff-7c53-46c9-9c63-38e4cd748049 req-13cecdec-8581-4c21-a794-141c094aef77 service nova] Releasing lock "refresh_cache-c2be17de-175a-401f-8c53-f785aeecfff4" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1883.583229] env[62510]: DEBUG oslo_concurrency.lockutils [None req-65e8aaa6-05bf-4596-a232-71539fb518c2 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "0c93a909-d08f-466c-bdef-a26fa35cd944" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.304s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1883.585207] env[62510]: DEBUG nova.compute.manager [None req-7fbe06b1-fc61-4348-b608-dc3f70ea944c tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1883.585453] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-7fbe06b1-fc61-4348-b608-dc3f70ea944c tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1883.586788] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c0d64e0-2582-475d-87d6-cace364e08ec {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.596679] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fbe06b1-fc61-4348-b608-dc3f70ea944c tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1883.597564] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a8f3b661-6a30-4b13-80cd-efcd3f4f358d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.606215] env[62510]: DEBUG oslo_vmware.api [None req-7fbe06b1-fc61-4348-b608-dc3f70ea944c tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for the task: (returnval){ [ 1883.606215] env[62510]: value = "task-1769451" [ 1883.606215] env[62510]: _type = "Task" [ 1883.606215] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.615992] env[62510]: DEBUG oslo_vmware.api [None req-7fbe06b1-fc61-4348-b608-dc3f70ea944c tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769451, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.635818] env[62510]: DEBUG oslo_vmware.api [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Task: {'id': task-1769450, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.092252} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1883.636285] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1883.636874] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5168e05f-a8be-4630-9093-0858d0c2f186 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.660954] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8/5cae60b1-c0b1-4ff4-baf9-b8d1885614e8.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1883.663931] env[62510]: DEBUG nova.compute.manager [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1883.664150] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1883.664391] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8ccf5da3-22b0-4db4-be6d-8e4b0effdc3a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.682577] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cda4c116-1660-4eba-b55b-53e4ff6f8789 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.693351] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1883.695027] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fce2d0fa-85d5-45a3-9b37-038547446bb7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.696739] env[62510]: DEBUG oslo_vmware.api [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Waiting for the task: (returnval){ [ 1883.696739] env[62510]: value = "task-1769452" [ 1883.696739] env[62510]: _type = "Task" [ 1883.696739] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.701460] env[62510]: DEBUG oslo_concurrency.lockutils [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1883.701698] env[62510]: DEBUG oslo_concurrency.lockutils [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1883.703312] env[62510]: INFO nova.compute.claims [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1883.708127] env[62510]: DEBUG oslo_vmware.api [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for the task: (returnval){ [ 1883.708127] env[62510]: value = "task-1769453" [ 1883.708127] env[62510]: _type = "Task" [ 1883.708127] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.718023] env[62510]: DEBUG oslo_vmware.api [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Task: {'id': task-1769452, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.724261] env[62510]: DEBUG oslo_vmware.api [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769453, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.756283] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "refresh_cache-72f8492b-304a-4451-ab40-4cdfe36b9e19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1883.756283] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquired lock "refresh_cache-72f8492b-304a-4451-ab40-4cdfe36b9e19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1883.756283] env[62510]: DEBUG nova.network.neutron [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1883.928672] env[62510]: DEBUG oslo_vmware.api [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52bae601-f4bf-c0bd-7df7-262024420c09, 'name': SearchDatastore_Task, 'duration_secs': 0.011858} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1883.928955] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1883.929309] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] c2be17de-175a-401f-8c53-f785aeecfff4/c2be17de-175a-401f-8c53-f785aeecfff4.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1883.929579] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f2e51812-b6a2-43c7-861c-ae3c8d9d4d90 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.937207] env[62510]: DEBUG oslo_vmware.api [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1883.937207] env[62510]: value = "task-1769454" [ 1883.937207] env[62510]: _type = "Task" [ 1883.937207] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.941295] env[62510]: DEBUG oslo_concurrency.lockutils [None req-492f71df-8daa-4b19-90e6-e94fd7c62a3e tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "0c93a909-d08f-466c-bdef-a26fa35cd944" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1883.941523] env[62510]: DEBUG oslo_concurrency.lockutils [None req-492f71df-8daa-4b19-90e6-e94fd7c62a3e tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "0c93a909-d08f-466c-bdef-a26fa35cd944" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1883.941703] env[62510]: DEBUG nova.compute.manager [None req-492f71df-8daa-4b19-90e6-e94fd7c62a3e tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1883.942514] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-896b5d78-ce58-4f4d-93af-0db9bb564fe0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.952950] env[62510]: DEBUG oslo_vmware.api [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769454, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.953271] env[62510]: DEBUG nova.compute.manager [None req-492f71df-8daa-4b19-90e6-e94fd7c62a3e tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62510) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1883.953852] env[62510]: DEBUG nova.objects.instance [None req-492f71df-8daa-4b19-90e6-e94fd7c62a3e tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lazy-loading 'flavor' on Instance uuid 0c93a909-d08f-466c-bdef-a26fa35cd944 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1883.966192] env[62510]: DEBUG oslo_concurrency.lockutils [None req-64606111-bfb5-4200-afe5-adef62850620 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Acquiring lock "refresh_cache-01204162-bf8e-46e0-bcf4-00df9ed7e7ce" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1883.966369] env[62510]: DEBUG oslo_concurrency.lockutils [None req-64606111-bfb5-4200-afe5-adef62850620 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Acquired lock "refresh_cache-01204162-bf8e-46e0-bcf4-00df9ed7e7ce" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1884.019993] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Releasing lock "refresh_cache-cf4160a8-1160-45fc-b9e5-e9526b6c1506" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1884.116422] env[62510]: DEBUG oslo_vmware.api [None req-7fbe06b1-fc61-4348-b608-dc3f70ea944c tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769451, 'name': PowerOffVM_Task, 'duration_secs': 0.245054} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1884.116752] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fbe06b1-fc61-4348-b608-dc3f70ea944c tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1884.116970] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-7fbe06b1-fc61-4348-b608-dc3f70ea944c tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1884.117256] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3902a6e5-f658-4f98-9f63-43d676c0632c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.205782] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-7fbe06b1-fc61-4348-b608-dc3f70ea944c tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1884.205955] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-7fbe06b1-fc61-4348-b608-dc3f70ea944c tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1884.206157] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fbe06b1-fc61-4348-b608-dc3f70ea944c tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Deleting the datastore file [datastore1] 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1884.209357] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9d795eb1-d307-46bb-9e84-4aa8690a6968 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.213379] env[62510]: DEBUG oslo_vmware.api [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Task: {'id': task-1769452, 'name': ReconfigVM_Task, 'duration_secs': 0.413914} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1884.217186] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Reconfigured VM instance instance-00000068 to attach disk [datastore1] 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8/5cae60b1-c0b1-4ff4-baf9-b8d1885614e8.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1884.218326] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fd57f2dc-d506-4d75-8edd-3ac2406c098a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.227254] env[62510]: DEBUG oslo_vmware.api [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769453, 'name': PowerOffVM_Task, 'duration_secs': 0.16373} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1884.229787] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1884.230010] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1884.230367] env[62510]: DEBUG oslo_vmware.api [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Waiting for the task: (returnval){ [ 1884.230367] env[62510]: value = "task-1769457" [ 1884.230367] env[62510]: _type = "Task" [ 1884.230367] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1884.230599] env[62510]: DEBUG oslo_vmware.api [None req-7fbe06b1-fc61-4348-b608-dc3f70ea944c tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for the task: (returnval){ [ 1884.230599] env[62510]: value = "task-1769456" [ 1884.230599] env[62510]: _type = "Task" [ 1884.230599] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1884.230800] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3f9f6f4e-09dc-4ab7-b5c9-095b87a4c609 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.245635] env[62510]: DEBUG oslo_vmware.api [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Task: {'id': task-1769457, 'name': Rename_Task} progress is 6%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.248905] env[62510]: DEBUG oslo_vmware.api [None req-7fbe06b1-fc61-4348-b608-dc3f70ea944c tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769456, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.262351] env[62510]: DEBUG nova.compute.manager [req-10840e7c-41c4-41a0-a967-4d8ec454a8fd req-39ecf2fb-9ed4-4c57-a77b-64d90f43b8c3 service nova] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Received event network-changed-348ebdec-3667-4eea-b76e-5356163db2f9 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1884.262584] env[62510]: DEBUG nova.compute.manager [req-10840e7c-41c4-41a0-a967-4d8ec454a8fd req-39ecf2fb-9ed4-4c57-a77b-64d90f43b8c3 service nova] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Refreshing instance network info cache due to event network-changed-348ebdec-3667-4eea-b76e-5356163db2f9. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1884.262771] env[62510]: DEBUG oslo_concurrency.lockutils [req-10840e7c-41c4-41a0-a967-4d8ec454a8fd req-39ecf2fb-9ed4-4c57-a77b-64d90f43b8c3 service nova] Acquiring lock "refresh_cache-72f8492b-304a-4451-ab40-4cdfe36b9e19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1884.300570] env[62510]: DEBUG nova.network.neutron [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Instance cache missing network info. 
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1884.439588] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1884.439813] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1884.439996] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Deleting the datastore file [datastore1] 82dceacf-1898-4d86-b1c6-552a24ab565f {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1884.443581] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a2e73a7e-c4e6-4455-8d5b-0d957e4b34b8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.454344] env[62510]: DEBUG oslo_vmware.api [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769454, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.458866] env[62510]: DEBUG oslo_vmware.api [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for the task: (returnval){ [ 1884.458866] env[62510]: value = "task-1769459" [ 1884.458866] env[62510]: _type = "Task" [ 1884.458866] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1884.473629] env[62510]: DEBUG oslo_vmware.api [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769459, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.478866] env[62510]: DEBUG nova.network.neutron [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Updating instance_info_cache with network_info: [{"id": "348ebdec-3667-4eea-b76e-5356163db2f9", "address": "fa:16:3e:eb:0c:99", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap348ebdec-36", "ovs_interfaceid": "348ebdec-3667-4eea-b76e-5356163db2f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1884.744923] env[62510]: DEBUG nova.network.neutron [None req-6b9f057d-9ef6-4ad9-a0a9-a4f712e8260b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Updating instance_info_cache with network_info: [{"id": "5992dff8-0336-4d13-bbe8-2614b9dc96d5", "address": "fa:16:3e:47:48:b1", "network": {"id": "4c55d05c-607e-4972-898f-4aacefeddfdb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1391357384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bae4f0adee8c4c28add1849316448538", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dced2f3d-7fd3-4a42-836d-9f02dab4c949", "external-id": "nsx-vlan-transportzone-117", "segmentation_id": 117, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5992dff8-03", "ovs_interfaceid": "5992dff8-0336-4d13-bbe8-2614b9dc96d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1884.752751] env[62510]: DEBUG oslo_vmware.api [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Task: {'id': 
task-1769457, 'name': Rename_Task, 'duration_secs': 0.379945} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1884.756313] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1884.756966] env[62510]: DEBUG oslo_vmware.api [None req-7fbe06b1-fc61-4348-b608-dc3f70ea944c tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769456, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.416145} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1884.757240] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0aa0f182-2c94-4e19-a5f3-d3a487380a57 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.759821] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fbe06b1-fc61-4348-b608-dc3f70ea944c tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1884.759821] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-7fbe06b1-fc61-4348-b608-dc3f70ea944c tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1884.759821] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-7fbe06b1-fc61-4348-b608-dc3f70ea944c tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1884.759821] env[62510]: INFO nova.compute.manager [None req-7fbe06b1-fc61-4348-b608-dc3f70ea944c tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1884.759963] env[62510]: DEBUG oslo.service.loopingcall [None req-7fbe06b1-fc61-4348-b608-dc3f70ea944c tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1884.760168] env[62510]: DEBUG nova.compute.manager [-] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1884.760740] env[62510]: DEBUG nova.network.neutron [-] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1884.774174] env[62510]: DEBUG oslo_vmware.api [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Waiting for the task: (returnval){ [ 1884.774174] env[62510]: value = "task-1769460" [ 1884.774174] env[62510]: _type = "Task" [ 1884.774174] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1884.785736] env[62510]: DEBUG oslo_vmware.api [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Task: {'id': task-1769460, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.956780] env[62510]: DEBUG oslo_vmware.api [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769454, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.583389} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1884.957901] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] c2be17de-175a-401f-8c53-f785aeecfff4/c2be17de-175a-401f-8c53-f785aeecfff4.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1884.958437] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1884.958628] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-81292180-54c0-4165-8645-d7e84ba5a02c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.968879] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-492f71df-8daa-4b19-90e6-e94fd7c62a3e tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1884.969427] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ae94606e-349d-481d-98f5-ec28dca177c1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1884.974334] env[62510]: DEBUG oslo_vmware.api [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1884.974334] env[62510]: value = "task-1769461" [ 1884.974334] env[62510]: _type = "Task" [ 1884.974334] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1884.977960] env[62510]: DEBUG oslo_vmware.api [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769459, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.215225} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1884.985783] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1884.986075] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1884.986335] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1884.986540] env[62510]: INFO nova.compute.manager [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Took 1.32 seconds to destroy the instance on the hypervisor. [ 1884.987050] env[62510]: DEBUG oslo.service.loopingcall [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1884.987500] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Releasing lock "refresh_cache-72f8492b-304a-4451-ab40-4cdfe36b9e19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1884.987843] env[62510]: DEBUG nova.compute.manager [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Instance network_info: |[{"id": "348ebdec-3667-4eea-b76e-5356163db2f9", "address": "fa:16:3e:eb:0c:99", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap348ebdec-36", "ovs_interfaceid": "348ebdec-3667-4eea-b76e-5356163db2f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1884.988250] env[62510]: DEBUG oslo_vmware.api [None req-492f71df-8daa-4b19-90e6-e94fd7c62a3e tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1884.988250] env[62510]: value = "task-1769462" [ 1884.988250] env[62510]: _type = "Task" [ 1884.988250] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1884.988680] env[62510]: DEBUG nova.compute.manager [-] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1884.988821] env[62510]: DEBUG nova.network.neutron [-] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1884.990661] env[62510]: DEBUG oslo_concurrency.lockutils [req-10840e7c-41c4-41a0-a967-4d8ec454a8fd req-39ecf2fb-9ed4-4c57-a77b-64d90f43b8c3 service nova] Acquired lock "refresh_cache-72f8492b-304a-4451-ab40-4cdfe36b9e19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1884.991270] env[62510]: DEBUG nova.network.neutron [req-10840e7c-41c4-41a0-a967-4d8ec454a8fd req-39ecf2fb-9ed4-4c57-a77b-64d90f43b8c3 service nova] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Refreshing network info cache for port 348ebdec-3667-4eea-b76e-5356163db2f9 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1884.992281] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:eb:0c:99', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2e0cfc48-d93b-4477-8082-69a2f7aa7701', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '348ebdec-3667-4eea-b76e-5356163db2f9', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1885.005526] env[62510]: DEBUG oslo.service.loopingcall [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1885.009937] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1885.019127] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-857ccbd9-2fa4-4e4c-8501-7900767ad759 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.035401] env[62510]: DEBUG oslo_vmware.api [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769461, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.044737] env[62510]: DEBUG oslo_vmware.api [None req-492f71df-8daa-4b19-90e6-e94fd7c62a3e tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769462, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.046908] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1885.046908] env[62510]: value = "task-1769463" [ 1885.046908] env[62510]: _type = "Task" [ 1885.046908] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1885.056357] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769463, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.104498] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28c72d3a-f303-42e7-8257-70d498b450f2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.112695] env[62510]: DEBUG nova.compute.manager [req-9292c145-7ce8-4eee-81ee-bf88d9cf3fbf req-207318b8-9463-43c7-8789-589d88790fd4 service nova] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Received event network-vif-deleted-da533fd5-935b-4b32-8845-bea1060e4ca1 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1885.112891] env[62510]: INFO nova.compute.manager [req-9292c145-7ce8-4eee-81ee-bf88d9cf3fbf req-207318b8-9463-43c7-8789-589d88790fd4 service nova] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Neutron deleted interface da533fd5-935b-4b32-8845-bea1060e4ca1; detaching it from the instance and deleting it from the info cache [ 1885.113097] env[62510]: DEBUG nova.network.neutron [req-9292c145-7ce8-4eee-81ee-bf88d9cf3fbf req-207318b8-9463-43c7-8789-589d88790fd4 service nova] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1885.121156] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2669702-d98f-49e3-a6e4-ef1b8d47be5b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.162443] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69df634a-2ce6-48b8-8705-fe3df0ae3b8d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.168925] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d508faa5-b416-4a9d-aa32-032480e2c68d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.175233] env[62510]: DEBUG nova.network.neutron [None req-64606111-bfb5-4200-afe5-adef62850620 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1885.189451] env[62510]: DEBUG nova.compute.provider_tree [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1885.249830] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1885.251327] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fef9e42-d23d-42e7-bb3e-a2ba2cc06859 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.256566] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6b9f057d-9ef6-4ad9-a0a9-a4f712e8260b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Releasing lock "refresh_cache-841460b0-d917-44ea-88c6-0e5a3022f658" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1885.256808] env[62510]: DEBUG nova.objects.instance [None req-6b9f057d-9ef6-4ad9-a0a9-a4f712e8260b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lazy-loading 'migration_context' on Instance uuid 841460b0-d917-44ea-88c6-0e5a3022f658 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1885.263170] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1885.263170] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-256e3e24-e564-455e-83b8-8f44ad1c03ec {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.287095] env[62510]: DEBUG oslo_vmware.api [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Task: {'id': task-1769460, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.355159] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1885.355397] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1885.355611] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Deleting the datastore file [datastore1] cf4160a8-1160-45fc-b9e5-e9526b6c1506 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1885.355943] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-127f0a59-5c8b-4798-9c55-c4df2b2ba7a6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.369013] env[62510]: DEBUG oslo_vmware.api [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1885.369013] env[62510]: value = "task-1769465" [ 1885.369013] env[62510]: _type = "Task" [ 1885.369013] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1885.377960] env[62510]: DEBUG oslo_vmware.api [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769465, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.487579] env[62510]: DEBUG oslo_vmware.api [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769461, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.189704} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1885.487897] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1885.488699] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ca2e96c-73eb-4c8b-b4ec-ab027c1c3e58 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.511495] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] c2be17de-175a-401f-8c53-f785aeecfff4/c2be17de-175a-401f-8c53-f785aeecfff4.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1885.513936] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f993896b-67d6-4b98-9e42-aa9983a64e12 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.537152] env[62510]: DEBUG oslo_vmware.api [None req-492f71df-8daa-4b19-90e6-e94fd7c62a3e tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769462, 'name': PowerOffVM_Task, 'duration_secs': 0.427955} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1885.538449] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-492f71df-8daa-4b19-90e6-e94fd7c62a3e tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1885.538668] env[62510]: DEBUG nova.compute.manager [None req-492f71df-8daa-4b19-90e6-e94fd7c62a3e tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1885.539033] env[62510]: DEBUG oslo_vmware.api [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1885.539033] env[62510]: value = "task-1769466" [ 1885.539033] env[62510]: _type = "Task" [ 1885.539033] env[62510]: } to complete. 
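For instance c2be17de-175a-401f-8c53-f785aeecfff4 the records above, together with their follow-ups later in the log, trace the tail of a spawn: the root VMDK is extended (ExtendVirtualDisk_Task), the disk is attached through a reconfigure (ReconfigVM_Task), the VM is renamed (Rename_Task) and finally powered on (PowerOnVM_Task). The stubs below only fix that ordering; they are illustrative placeholders, not Nova's vmwareapi functions.

    def extend_virtual_disk(vmdk_path, size_gb):                     # ExtendVirtualDisk_Task
        print("extend %s to %s GB" % (vmdk_path, size_gb))

    def attach_disk_to_vm(vm_name, vmdk_path, disk_type="sparse"):   # ReconfigVM_Task
        print("attach %s (%s) to %s" % (vmdk_path, disk_type, vm_name))

    def rename_vm(vm_name):                                          # Rename_Task
        print("rename to %s" % vm_name)

    def power_on_vm(vm_name):                                        # PowerOnVM_Task
        print("power on %s" % vm_name)

    def finish_spawn(vm_name, vmdk_path, size_gb):
        """Ordering of the spawn tail as it appears in the log."""
        extend_virtual_disk(vmdk_path, size_gb)
        attach_disk_to_vm(vm_name, vmdk_path)
        rename_vm(vm_name)
        power_on_vm(vm_name)

    # Path taken from the ReconfigVM_Task record above; the size is arbitrary.
    finish_spawn("c2be17de-175a-401f-8c53-f785aeecfff4",
                 "[datastore1] c2be17de-175a-401f-8c53-f785aeecfff4/"
                 "c2be17de-175a-401f-8c53-f785aeecfff4.vmdk",
                 size_gb=1)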
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1885.539722] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8055eb4-9e9b-463f-ba09-72cf2ba5bfc0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.556017] env[62510]: DEBUG oslo_vmware.api [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769466, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.562483] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769463, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.581955] env[62510]: DEBUG nova.network.neutron [-] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1885.616563] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-249a2fcd-8598-4c28-857b-893804293cea {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.629329] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b83791a9-3fa3-4a75-b62f-496a39ce9959 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.672664] env[62510]: DEBUG nova.compute.manager [req-9292c145-7ce8-4eee-81ee-bf88d9cf3fbf req-207318b8-9463-43c7-8789-589d88790fd4 service nova] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Detach interface failed, port_id=da533fd5-935b-4b32-8845-bea1060e4ca1, reason: Instance 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c could not be found. {{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1885.724437] env[62510]: ERROR nova.scheduler.client.report [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [req-5b1f9aaf-6547-4bae-a215-55a163dcf8e7] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c3653102-341b-4ed1-8b1f-1abaf8aa3e56. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-5b1f9aaf-6547-4bae-a215-55a163dcf8e7"}]} [ 1885.744463] env[62510]: DEBUG nova.scheduler.client.report [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Refreshing inventories for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1885.761731] env[62510]: DEBUG nova.scheduler.client.report [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Updating ProviderTree inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1885.762015] env[62510]: DEBUG nova.compute.provider_tree [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1885.768021] env[62510]: DEBUG nova.objects.base [None req-6b9f057d-9ef6-4ad9-a0a9-a4f712e8260b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Object Instance<841460b0-d917-44ea-88c6-0e5a3022f658> lazy-loaded attributes: info_cache,migration_context {{(pid=62510) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1885.768021] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff6df400-ccce-4e25-a179-6d6defe4ad51 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.790054] env[62510]: DEBUG nova.scheduler.client.report [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Refreshing aggregate associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, aggregates: None {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1885.797136] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a20ff37-09fe-480b-9bda-d4dd92ebfbe2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.807139] env[62510]: DEBUG oslo_vmware.api [None 
req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Task: {'id': task-1769460, 'name': PowerOnVM_Task, 'duration_secs': 0.537988} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1885.810883] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1885.810883] env[62510]: INFO nova.compute.manager [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Took 9.14 seconds to spawn the instance on the hypervisor. [ 1885.810883] env[62510]: DEBUG nova.compute.manager [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1885.810883] env[62510]: DEBUG oslo_vmware.api [None req-6b9f057d-9ef6-4ad9-a0a9-a4f712e8260b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 1885.810883] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52fae91c-9248-445e-6190-f5662e1e3e89" [ 1885.810883] env[62510]: _type = "Task" [ 1885.810883] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1885.810883] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36f179ea-abee-4fce-8350-aebce3ecba47 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.821997] env[62510]: DEBUG oslo_vmware.api [None req-6b9f057d-9ef6-4ad9-a0a9-a4f712e8260b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52fae91c-9248-445e-6190-f5662e1e3e89, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.832756] env[62510]: DEBUG nova.scheduler.client.report [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Refreshing trait associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1885.836439] env[62510]: DEBUG nova.network.neutron [-] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1885.842126] env[62510]: DEBUG nova.network.neutron [req-10840e7c-41c4-41a0-a967-4d8ec454a8fd req-39ecf2fb-9ed4-4c57-a77b-64d90f43b8c3 service nova] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Updated VIF entry in instance network info cache for port 348ebdec-3667-4eea-b76e-5356163db2f9. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1885.842391] env[62510]: DEBUG nova.network.neutron [req-10840e7c-41c4-41a0-a967-4d8ec454a8fd req-39ecf2fb-9ed4-4c57-a77b-64d90f43b8c3 service nova] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Updating instance_info_cache with network_info: [{"id": "348ebdec-3667-4eea-b76e-5356163db2f9", "address": "fa:16:3e:eb:0c:99", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap348ebdec-36", "ovs_interfaceid": "348ebdec-3667-4eea-b76e-5356163db2f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1885.882207] env[62510]: DEBUG oslo_vmware.api [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769465, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.061206] env[62510]: DEBUG oslo_vmware.api [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769466, 'name': ReconfigVM_Task, 'duration_secs': 0.494871} completed successfully. 
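The ERROR at 1885.724 is Placement rejecting an inventory update with 409 placement.concurrent_update ("resource provider generation conflict"): the report client sent a stale provider generation. The records that follow show the standard recovery, refreshing inventories, aggregate and trait associations and retrying, which later succeeds and bumps the provider generation from 139 to 140. Below is a minimal sketch of that generation-guarded update against the Placement inventories API; the endpoint, token and microversion header are assumptions, and real clients authenticate through keystoneauth rather than raw requests.

    import requests

    PLACEMENT = "http://placement.example/placement"    # hypothetical endpoint
    HEADERS = {"X-Auth-Token": "...",                    # placeholder credentials
               "OpenStack-API-Version": "placement 1.26"}

    def set_inventory(rp_uuid, inventories, retries=3):
        """PUT inventories with the provider generation; refresh and retry on 409."""
        url = "%s/resource_providers/%s/inventories" % (PLACEMENT, rp_uuid)
        for _ in range(retries):
            current = requests.get(url, headers=HEADERS).json()
            payload = {
                # Echo back the generation we just read; placement increments it
                # on success and returns 409 if someone else got there first.
                "resource_provider_generation": current["resource_provider_generation"],
                "inventories": inventories,
            }
            resp = requests.put(url, json=payload, headers=HEADERS)
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()
            # 409 placement.concurrent_update: loop to re-read the new generation.
        raise RuntimeError("could not update inventory for %s" % rp_uuid)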
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.061657] env[62510]: DEBUG oslo_concurrency.lockutils [None req-492f71df-8daa-4b19-90e6-e94fd7c62a3e tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "0c93a909-d08f-466c-bdef-a26fa35cd944" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.120s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1886.062879] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Reconfigured VM instance instance-00000069 to attach disk [datastore1] c2be17de-175a-401f-8c53-f785aeecfff4/c2be17de-175a-401f-8c53-f785aeecfff4.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1886.063703] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-121a9efe-ee5d-4163-83e9-cb7a5d58fbc5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.068630] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769463, 'name': CreateVM_Task, 'duration_secs': 0.725858} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.069398] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1886.072357] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1886.072524] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1886.072867] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1886.073396] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4d220e1-e816-4479-95bf-0e53b01e630c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.076468] env[62510]: DEBUG oslo_vmware.api [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1886.076468] env[62510]: 
value = "task-1769467" [ 1886.076468] env[62510]: _type = "Task" [ 1886.076468] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.084113] env[62510]: DEBUG oslo_vmware.api [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1886.084113] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5235963e-edba-2dbd-45f2-353302e2a42d" [ 1886.084113] env[62510]: _type = "Task" [ 1886.084113] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.086702] env[62510]: INFO nova.compute.manager [-] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Took 1.33 seconds to deallocate network for instance. [ 1886.097801] env[62510]: DEBUG nova.network.neutron [None req-64606111-bfb5-4200-afe5-adef62850620 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Updating instance_info_cache with network_info: [{"id": "53f991bc-ec2f-434b-8943-f8e6d891b608", "address": "fa:16:3e:0e:3a:9b", "network": {"id": "7d511bb5-50cd-4a86-94d2-efb9fbf27e48", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-70351339-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.191", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "69511bceaf9c432c8819574d05584f09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53f991bc-ec", "ovs_interfaceid": "53f991bc-ec2f-434b-8943-f8e6d891b608", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1886.106409] env[62510]: DEBUG oslo_vmware.api [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769467, 'name': Rename_Task} progress is 10%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.117205] env[62510]: DEBUG oslo_vmware.api [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5235963e-edba-2dbd-45f2-353302e2a42d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.152677] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73ef6140-c9d2-4d53-b5a1-62cb839cb7d3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.161285] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c6022d0-5a69-4430-9903-a2780c6280cb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.194585] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec8d7c6c-4092-4ab2-ac0b-bb42bee90262 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.204242] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b166f743-c977-4279-9709-500c1b47ca1c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.220196] env[62510]: DEBUG nova.compute.provider_tree [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1886.290538] env[62510]: DEBUG nova.compute.manager [req-7fcad058-b81d-4a41-9b6f-2d934a5750ef req-7ac48c87-3b33-4ec6-b8b7-be0b06fa746a service nova] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Received event network-vif-unplugged-022a0379-8a0f-412f-a55a-f8fcaf1102f3 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1886.291083] env[62510]: DEBUG oslo_concurrency.lockutils [req-7fcad058-b81d-4a41-9b6f-2d934a5750ef req-7ac48c87-3b33-4ec6-b8b7-be0b06fa746a service nova] Acquiring lock "cf4160a8-1160-45fc-b9e5-e9526b6c1506-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1886.291083] env[62510]: DEBUG oslo_concurrency.lockutils [req-7fcad058-b81d-4a41-9b6f-2d934a5750ef req-7ac48c87-3b33-4ec6-b8b7-be0b06fa746a service nova] Lock "cf4160a8-1160-45fc-b9e5-e9526b6c1506-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1886.291273] env[62510]: DEBUG oslo_concurrency.lockutils [req-7fcad058-b81d-4a41-9b6f-2d934a5750ef req-7ac48c87-3b33-4ec6-b8b7-be0b06fa746a service nova] Lock "cf4160a8-1160-45fc-b9e5-e9526b6c1506-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1886.291361] env[62510]: DEBUG 
nova.compute.manager [req-7fcad058-b81d-4a41-9b6f-2d934a5750ef req-7ac48c87-3b33-4ec6-b8b7-be0b06fa746a service nova] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] No waiting events found dispatching network-vif-unplugged-022a0379-8a0f-412f-a55a-f8fcaf1102f3 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1886.291463] env[62510]: WARNING nova.compute.manager [req-7fcad058-b81d-4a41-9b6f-2d934a5750ef req-7ac48c87-3b33-4ec6-b8b7-be0b06fa746a service nova] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Received unexpected event network-vif-unplugged-022a0379-8a0f-412f-a55a-f8fcaf1102f3 for instance with vm_state shelved and task_state shelving_offloading. [ 1886.291627] env[62510]: DEBUG nova.compute.manager [req-7fcad058-b81d-4a41-9b6f-2d934a5750ef req-7ac48c87-3b33-4ec6-b8b7-be0b06fa746a service nova] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Received event network-changed-53f991bc-ec2f-434b-8943-f8e6d891b608 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1886.291785] env[62510]: DEBUG nova.compute.manager [req-7fcad058-b81d-4a41-9b6f-2d934a5750ef req-7ac48c87-3b33-4ec6-b8b7-be0b06fa746a service nova] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Refreshing instance network info cache due to event network-changed-53f991bc-ec2f-434b-8943-f8e6d891b608. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1886.291948] env[62510]: DEBUG oslo_concurrency.lockutils [req-7fcad058-b81d-4a41-9b6f-2d934a5750ef req-7ac48c87-3b33-4ec6-b8b7-be0b06fa746a service nova] Acquiring lock "refresh_cache-01204162-bf8e-46e0-bcf4-00df9ed7e7ce" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1886.323921] env[62510]: DEBUG oslo_vmware.api [None req-6b9f057d-9ef6-4ad9-a0a9-a4f712e8260b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52fae91c-9248-445e-6190-f5662e1e3e89, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.339727] env[62510]: INFO nova.compute.manager [-] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Took 1.35 seconds to deallocate network for instance. [ 1886.342999] env[62510]: INFO nova.compute.manager [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Took 24.62 seconds to build instance. [ 1886.345749] env[62510]: DEBUG oslo_concurrency.lockutils [req-10840e7c-41c4-41a0-a967-4d8ec454a8fd req-39ecf2fb-9ed4-4c57-a77b-64d90f43b8c3 service nova] Releasing lock "refresh_cache-72f8492b-304a-4451-ab40-4cdfe36b9e19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1886.380969] env[62510]: DEBUG oslo_vmware.api [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769465, 'name': DeleteDatastoreFile_Task} progress is 0%. 
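The network-vif-unplugged-022a0379-... handling just above shows how externally delivered Neutron events are dispatched: a per-instance "-events" lock is taken, the waiter list is consulted, and because nothing was waiting (the instance is already shelved and offloading) the event is logged as unexpected instead of being handed to a waiter. A toy version of such a waiter registry, built on threading.Event and using invented names, looks like this:

    import threading

    class InstanceEvents:
        """Toy registry: expected events wake their waiters, the rest are 'unexpected'."""

        def __init__(self):
            self._waiters = {}             # (instance_uuid, event_name) -> threading.Event
            self._lock = threading.Lock()  # cf. the "<uuid>-events" locks in the log

        def prepare(self, instance_uuid, event_name):
            ev = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = ev
            return ev                      # interested code blocks on ev.wait(timeout)

        def dispatch(self, instance_uuid, event_name):
            with self._lock:
                waiter = self._waiters.pop((instance_uuid, event_name), None)
            if waiter is None:
                # Matches "No waiting events found ... Received unexpected event ..."
                print("Received unexpected event %s for instance %s"
                      % (event_name, instance_uuid))
            else:
                waiter.set()               # wake whoever called prepare() and is waiting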
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.589282] env[62510]: DEBUG oslo_vmware.api [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769467, 'name': Rename_Task, 'duration_secs': 0.236492} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.592613] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1886.592905] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ab575d38-aff2-46df-a665-baa297feca04 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.601218] env[62510]: DEBUG oslo_vmware.api [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5235963e-edba-2dbd-45f2-353302e2a42d, 'name': SearchDatastore_Task, 'duration_secs': 0.307938} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.602510] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1886.602752] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1886.602981] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1886.603168] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1886.603352] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Creating directory with path [datastore1] devstack-image-cache_base 
{{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1886.603660] env[62510]: DEBUG oslo_vmware.api [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1886.603660] env[62510]: value = "task-1769468" [ 1886.603660] env[62510]: _type = "Task" [ 1886.603660] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.603851] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-00b3ac98-c34d-4abe-8b22-da0bae8757a9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.606309] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7fbe06b1-fc61-4348-b608-dc3f70ea944c tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1886.609750] env[62510]: DEBUG oslo_concurrency.lockutils [None req-64606111-bfb5-4200-afe5-adef62850620 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Releasing lock "refresh_cache-01204162-bf8e-46e0-bcf4-00df9ed7e7ce" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1886.610020] env[62510]: DEBUG nova.compute.manager [None req-64606111-bfb5-4200-afe5-adef62850620 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Inject network info {{(pid=62510) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7600}} [ 1886.610300] env[62510]: DEBUG nova.compute.manager [None req-64606111-bfb5-4200-afe5-adef62850620 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] network_info to inject: |[{"id": "53f991bc-ec2f-434b-8943-f8e6d891b608", "address": "fa:16:3e:0e:3a:9b", "network": {"id": "7d511bb5-50cd-4a86-94d2-efb9fbf27e48", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-70351339-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.191", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "69511bceaf9c432c8819574d05584f09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53f991bc-ec", "ovs_interfaceid": "53f991bc-ec2f-434b-8943-f8e6d891b608", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": 
false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7601}} [ 1886.615189] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-64606111-bfb5-4200-afe5-adef62850620 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Reconfiguring VM instance to set the machine id {{(pid=62510) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1886.616885] env[62510]: DEBUG oslo_concurrency.lockutils [req-7fcad058-b81d-4a41-9b6f-2d934a5750ef req-7ac48c87-3b33-4ec6-b8b7-be0b06fa746a service nova] Acquired lock "refresh_cache-01204162-bf8e-46e0-bcf4-00df9ed7e7ce" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1886.617101] env[62510]: DEBUG nova.network.neutron [req-7fcad058-b81d-4a41-9b6f-2d934a5750ef req-7ac48c87-3b33-4ec6-b8b7-be0b06fa746a service nova] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Refreshing network info cache for port 53f991bc-ec2f-434b-8943-f8e6d891b608 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1886.619281] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-359db6ee-fadf-4395-b245-f1bb0bc1a3e9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.630228] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1886.630414] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1886.635419] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a851bd0-6131-485e-b86b-3f873e985fa9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.638042] env[62510]: DEBUG oslo_vmware.api [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769468, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.642665] env[62510]: DEBUG oslo_vmware.api [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1886.642665] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52317b1e-bbee-e191-5fc5-38fcc7dac744" [ 1886.642665] env[62510]: _type = "Task" [ 1886.642665] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.647614] env[62510]: DEBUG oslo_vmware.api [None req-64606111-bfb5-4200-afe5-adef62850620 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Waiting for the task: (returnval){ [ 1886.647614] env[62510]: value = "task-1769469" [ 1886.647614] env[62510]: _type = "Task" [ 1886.647614] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.654359] env[62510]: DEBUG oslo_vmware.api [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52317b1e-bbee-e191-5fc5-38fcc7dac744, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.660723] env[62510]: DEBUG oslo_vmware.api [None req-64606111-bfb5-4200-afe5-adef62850620 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Task: {'id': task-1769469, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.753076] env[62510]: DEBUG nova.scheduler.client.report [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Updated inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with generation 139 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1886.753357] env[62510]: DEBUG nova.compute.provider_tree [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Updating resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 generation from 139 to 140 during operation: update_inventory {{(pid=62510) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1886.753538] env[62510]: DEBUG nova.compute.provider_tree [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1886.774555] env[62510]: DEBUG oslo_concurrency.lockutils [None req-08008098-e99e-48d0-b37a-a5e1d914f75b tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] 
Acquiring lock "0c93a909-d08f-466c-bdef-a26fa35cd944" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1886.774820] env[62510]: DEBUG oslo_concurrency.lockutils [None req-08008098-e99e-48d0-b37a-a5e1d914f75b tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "0c93a909-d08f-466c-bdef-a26fa35cd944" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1886.775044] env[62510]: DEBUG oslo_concurrency.lockutils [None req-08008098-e99e-48d0-b37a-a5e1d914f75b tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "0c93a909-d08f-466c-bdef-a26fa35cd944-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1886.775255] env[62510]: DEBUG oslo_concurrency.lockutils [None req-08008098-e99e-48d0-b37a-a5e1d914f75b tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "0c93a909-d08f-466c-bdef-a26fa35cd944-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1886.775492] env[62510]: DEBUG oslo_concurrency.lockutils [None req-08008098-e99e-48d0-b37a-a5e1d914f75b tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "0c93a909-d08f-466c-bdef-a26fa35cd944-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1886.777791] env[62510]: INFO nova.compute.manager [None req-08008098-e99e-48d0-b37a-a5e1d914f75b tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Terminating instance [ 1886.824221] env[62510]: DEBUG oslo_vmware.api [None req-6b9f057d-9ef6-4ad9-a0a9-a4f712e8260b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52fae91c-9248-445e-6190-f5662e1e3e89, 'name': SearchDatastore_Task, 'duration_secs': 0.581152} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.824541] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6b9f057d-9ef6-4ad9-a0a9-a4f712e8260b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1886.847256] env[62510]: DEBUG oslo_concurrency.lockutils [None req-b34858ef-db38-4094-966f-117719c39997 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Lock "5cae60b1-c0b1-4ff4-baf9-b8d1885614e8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.129s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1886.848328] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1886.880039] env[62510]: DEBUG oslo_vmware.api [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769465, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.331136} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.880039] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1886.880278] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1886.880476] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1886.904610] env[62510]: INFO nova.scheduler.client.report [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Deleted allocations for instance cf4160a8-1160-45fc-b9e5-e9526b6c1506 [ 1887.045100] env[62510]: DEBUG nova.objects.instance [None req-aed5d563-dd11-46d2-960f-56585fe49098 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Lazy-loading 'flavor' on Instance uuid 01204162-bf8e-46e0-bcf4-00df9ed7e7ce {{(pid=62510) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 1887.116744] env[62510]: DEBUG oslo_vmware.api [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769468, 'name': PowerOnVM_Task, 'duration_secs': 0.423448} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.117104] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1887.118025] env[62510]: INFO nova.compute.manager [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Took 7.98 seconds to spawn the instance on the hypervisor. [ 1887.118025] env[62510]: DEBUG nova.compute.manager [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1887.118349] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd12c65c-3e21-45fb-8f97-8a46e76b96b7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.139050] env[62510]: DEBUG nova.compute.manager [req-0e7479d7-caaf-4313-a01d-aab4bfa51604 req-384acc1e-8fad-4439-b796-4d9b4ddf367d service nova] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Received event network-vif-deleted-359f36f0-f995-4822-b3df-83b9a561be76 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1887.157022] env[62510]: DEBUG oslo_vmware.api [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52317b1e-bbee-e191-5fc5-38fcc7dac744, 'name': SearchDatastore_Task, 'duration_secs': 0.012357} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.161133] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-525e826b-cfa9-4dcc-a5cd-201d16296eb5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.167620] env[62510]: DEBUG oslo_vmware.api [None req-64606111-bfb5-4200-afe5-adef62850620 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Task: {'id': task-1769469, 'name': ReconfigVM_Task, 'duration_secs': 0.153564} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.168286] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-64606111-bfb5-4200-afe5-adef62850620 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Reconfigured VM instance to set the machine id {{(pid=62510) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1887.172523] env[62510]: DEBUG oslo_vmware.api [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1887.172523] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52306678-0085-74fe-04d2-ace048e5ca3e" [ 1887.172523] env[62510]: _type = "Task" [ 1887.172523] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.182371] env[62510]: DEBUG oslo_vmware.api [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52306678-0085-74fe-04d2-ace048e5ca3e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.258720] env[62510]: DEBUG oslo_concurrency.lockutils [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.557s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1887.259309] env[62510]: DEBUG nova.compute.manager [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1887.262544] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7fbe06b1-fc61-4348-b608-dc3f70ea944c tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.656s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1887.262544] env[62510]: DEBUG nova.objects.instance [None req-7fbe06b1-fc61-4348-b608-dc3f70ea944c tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Lazy-loading 'resources' on Instance uuid 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1887.281427] env[62510]: DEBUG nova.compute.manager [None req-08008098-e99e-48d0-b37a-a5e1d914f75b tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1887.281638] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-08008098-e99e-48d0-b37a-a5e1d914f75b tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1887.282503] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac1a3565-86f1-4351-aa67-76c62f3af340 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.290950] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-08008098-e99e-48d0-b37a-a5e1d914f75b tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1887.291927] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-da550031-db31-4499-8d89-75c4461a9194 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.409176] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1887.453454] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-08008098-e99e-48d0-b37a-a5e1d914f75b tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1887.453648] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-08008098-e99e-48d0-b37a-a5e1d914f75b tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1887.453792] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-08008098-e99e-48d0-b37a-a5e1d914f75b tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Deleting the datastore file [datastore1] 0c93a909-d08f-466c-bdef-a26fa35cd944 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1887.454093] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-16144f92-49e9-44b9-84f4-0931bacbc9e1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.462228] env[62510]: DEBUG oslo_vmware.api [None req-08008098-e99e-48d0-b37a-a5e1d914f75b tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1887.462228] env[62510]: value = "task-1769471" [ 1887.462228] env[62510]: _type = "Task" [ 1887.462228] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.476114] env[62510]: DEBUG oslo_vmware.api [None req-08008098-e99e-48d0-b37a-a5e1d914f75b tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769471, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.536756] env[62510]: DEBUG nova.network.neutron [req-7fcad058-b81d-4a41-9b6f-2d934a5750ef req-7ac48c87-3b33-4ec6-b8b7-be0b06fa746a service nova] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Updated VIF entry in instance network info cache for port 53f991bc-ec2f-434b-8943-f8e6d891b608. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1887.537105] env[62510]: DEBUG nova.network.neutron [req-7fcad058-b81d-4a41-9b6f-2d934a5750ef req-7ac48c87-3b33-4ec6-b8b7-be0b06fa746a service nova] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Updating instance_info_cache with network_info: [{"id": "53f991bc-ec2f-434b-8943-f8e6d891b608", "address": "fa:16:3e:0e:3a:9b", "network": {"id": "7d511bb5-50cd-4a86-94d2-efb9fbf27e48", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-70351339-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.191", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "69511bceaf9c432c8819574d05584f09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53f991bc-ec", "ovs_interfaceid": "53f991bc-ec2f-434b-8943-f8e6d891b608", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1887.549290] env[62510]: DEBUG oslo_concurrency.lockutils [None req-aed5d563-dd11-46d2-960f-56585fe49098 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Acquiring lock "refresh_cache-01204162-bf8e-46e0-bcf4-00df9ed7e7ce" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1887.642511] env[62510]: INFO nova.compute.manager [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Took 24.57 seconds to build instance. 
[ 1887.685922] env[62510]: DEBUG oslo_vmware.api [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52306678-0085-74fe-04d2-ace048e5ca3e, 'name': SearchDatastore_Task, 'duration_secs': 0.011868} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.686213] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1887.686462] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 72f8492b-304a-4451-ab40-4cdfe36b9e19/72f8492b-304a-4451-ab40-4cdfe36b9e19.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1887.686744] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7b22465e-7b19-4b02-bb6f-542d3082dcac {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.693602] env[62510]: DEBUG oslo_vmware.api [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1887.693602] env[62510]: value = "task-1769472" [ 1887.693602] env[62510]: _type = "Task" [ 1887.693602] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.701986] env[62510]: DEBUG oslo_vmware.api [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769472, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.718667] env[62510]: DEBUG oslo_concurrency.lockutils [None req-746ef766-83c2-4cb1-b4a6-e64b71c42dc1 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Acquiring lock "5cae60b1-c0b1-4ff4-baf9-b8d1885614e8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1887.718919] env[62510]: DEBUG oslo_concurrency.lockutils [None req-746ef766-83c2-4cb1-b4a6-e64b71c42dc1 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Lock "5cae60b1-c0b1-4ff4-baf9-b8d1885614e8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1887.719159] env[62510]: DEBUG oslo_concurrency.lockutils [None req-746ef766-83c2-4cb1-b4a6-e64b71c42dc1 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Acquiring lock "5cae60b1-c0b1-4ff4-baf9-b8d1885614e8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1887.719346] env[62510]: DEBUG oslo_concurrency.lockutils [None req-746ef766-83c2-4cb1-b4a6-e64b71c42dc1 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Lock "5cae60b1-c0b1-4ff4-baf9-b8d1885614e8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1887.719514] env[62510]: DEBUG oslo_concurrency.lockutils [None req-746ef766-83c2-4cb1-b4a6-e64b71c42dc1 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Lock "5cae60b1-c0b1-4ff4-baf9-b8d1885614e8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1887.721840] env[62510]: INFO nova.compute.manager [None req-746ef766-83c2-4cb1-b4a6-e64b71c42dc1 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Terminating instance [ 1887.764898] env[62510]: DEBUG nova.compute.utils [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1887.771252] env[62510]: DEBUG nova.compute.manager [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1887.772777] env[62510]: DEBUG nova.network.neutron [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1887.826024] env[62510]: DEBUG nova.policy [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e483d7dc32804985bc9af5128670131b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5b13a257970e4a9a9f9cfecaaf37d9da', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1887.973705] env[62510]: DEBUG oslo_vmware.api [None req-08008098-e99e-48d0-b37a-a5e1d914f75b tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769471, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.121041} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.976969] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-08008098-e99e-48d0-b37a-a5e1d914f75b tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1887.977264] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-08008098-e99e-48d0-b37a-a5e1d914f75b tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1887.977550] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-08008098-e99e-48d0-b37a-a5e1d914f75b tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1887.977827] env[62510]: INFO nova.compute.manager [None req-08008098-e99e-48d0-b37a-a5e1d914f75b tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Took 0.70 seconds to destroy the instance on the hypervisor. [ 1887.978141] env[62510]: DEBUG oslo.service.loopingcall [None req-08008098-e99e-48d0-b37a-a5e1d914f75b tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1887.978708] env[62510]: DEBUG nova.compute.manager [-] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1887.979647] env[62510]: DEBUG nova.network.neutron [-] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1888.043078] env[62510]: DEBUG oslo_concurrency.lockutils [req-7fcad058-b81d-4a41-9b6f-2d934a5750ef req-7ac48c87-3b33-4ec6-b8b7-be0b06fa746a service nova] Releasing lock "refresh_cache-01204162-bf8e-46e0-bcf4-00df9ed7e7ce" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1888.043450] env[62510]: DEBUG nova.compute.manager [req-7fcad058-b81d-4a41-9b6f-2d934a5750ef req-7ac48c87-3b33-4ec6-b8b7-be0b06fa746a service nova] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Received event network-changed-022a0379-8a0f-412f-a55a-f8fcaf1102f3 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1888.043632] env[62510]: DEBUG nova.compute.manager [req-7fcad058-b81d-4a41-9b6f-2d934a5750ef req-7ac48c87-3b33-4ec6-b8b7-be0b06fa746a service nova] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Refreshing instance network info cache due to event network-changed-022a0379-8a0f-412f-a55a-f8fcaf1102f3. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1888.044054] env[62510]: DEBUG oslo_concurrency.lockutils [req-7fcad058-b81d-4a41-9b6f-2d934a5750ef req-7ac48c87-3b33-4ec6-b8b7-be0b06fa746a service nova] Acquiring lock "refresh_cache-cf4160a8-1160-45fc-b9e5-e9526b6c1506" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1888.044054] env[62510]: DEBUG oslo_concurrency.lockutils [req-7fcad058-b81d-4a41-9b6f-2d934a5750ef req-7ac48c87-3b33-4ec6-b8b7-be0b06fa746a service nova] Acquired lock "refresh_cache-cf4160a8-1160-45fc-b9e5-e9526b6c1506" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1888.044205] env[62510]: DEBUG nova.network.neutron [req-7fcad058-b81d-4a41-9b6f-2d934a5750ef req-7ac48c87-3b33-4ec6-b8b7-be0b06fa746a service nova] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Refreshing network info cache for port 022a0379-8a0f-412f-a55a-f8fcaf1102f3 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1888.045593] env[62510]: DEBUG oslo_concurrency.lockutils [None req-aed5d563-dd11-46d2-960f-56585fe49098 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Acquired lock "refresh_cache-01204162-bf8e-46e0-bcf4-00df9ed7e7ce" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1888.099511] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf7fa92b-7cc4-492c-840a-2c48ca23160b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.108572] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1349ee3-17c4-4911-a5f5-d43613eb55df {{(pid=62510) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.143354] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6130305-12f3-4701-88a0-36a88c1f26ee {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.147078] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2299fa67-6f7c-44aa-9677-4aa7965b9599 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "c2be17de-175a-401f-8c53-f785aeecfff4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.080s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1888.153815] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e504727a-e927-4735-b6fd-2d55e50984a1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.172377] env[62510]: DEBUG nova.compute.provider_tree [None req-7fbe06b1-fc61-4348-b608-dc3f70ea944c tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1888.208038] env[62510]: DEBUG oslo_vmware.api [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769472, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.227271] env[62510]: DEBUG nova.compute.manager [None req-746ef766-83c2-4cb1-b4a6-e64b71c42dc1 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1888.227623] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-746ef766-83c2-4cb1-b4a6-e64b71c42dc1 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1888.228491] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8d50f50-8d6a-40d3-b495-ad28619cffeb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.236957] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-746ef766-83c2-4cb1-b4a6-e64b71c42dc1 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1888.239760] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2851da4e-7046-42ec-b34d-91ebb49fc4b0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.249150] env[62510]: DEBUG oslo_vmware.api [None req-746ef766-83c2-4cb1-b4a6-e64b71c42dc1 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Waiting for the task: (returnval){ [ 1888.249150] env[62510]: value = "task-1769473" [ 1888.249150] env[62510]: _type = "Task" [ 1888.249150] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1888.258182] env[62510]: DEBUG oslo_vmware.api [None req-746ef766-83c2-4cb1-b4a6-e64b71c42dc1 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Task: {'id': task-1769473, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.272410] env[62510]: DEBUG nova.compute.manager [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Start building block device mappings for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1888.449451] env[62510]: DEBUG nova.network.neutron [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Successfully created port: aa1b717d-79b9-457c-829a-a4e12f0187c4 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1888.517623] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d848be77-ea11-4cb2-9dba-9315a55955ad tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "bc474f8b-dd3b-4d7a-a8e0-fea5570b3091" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1888.517945] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d848be77-ea11-4cb2-9dba-9315a55955ad tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "bc474f8b-dd3b-4d7a-a8e0-fea5570b3091" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1888.706431] env[62510]: DEBUG oslo_vmware.api [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769472, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.516191} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1888.706431] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 72f8492b-304a-4451-ab40-4cdfe36b9e19/72f8492b-304a-4451-ab40-4cdfe36b9e19.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1888.706431] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1888.706431] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5105375b-c927-4561-8a98-318dec8b0200 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.709150] env[62510]: DEBUG nova.scheduler.client.report [None req-7fbe06b1-fc61-4348-b608-dc3f70ea944c tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Updated inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with generation 140 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 
'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1888.709450] env[62510]: DEBUG nova.compute.provider_tree [None req-7fbe06b1-fc61-4348-b608-dc3f70ea944c tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Updating resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 generation from 140 to 141 during operation: update_inventory {{(pid=62510) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1888.709725] env[62510]: DEBUG nova.compute.provider_tree [None req-7fbe06b1-fc61-4348-b608-dc3f70ea944c tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1888.715584] env[62510]: DEBUG oslo_vmware.api [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1888.715584] env[62510]: value = "task-1769474" [ 1888.715584] env[62510]: _type = "Task" [ 1888.715584] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1888.727538] env[62510]: DEBUG oslo_vmware.api [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769474, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.762857] env[62510]: DEBUG oslo_vmware.api [None req-746ef766-83c2-4cb1-b4a6-e64b71c42dc1 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Task: {'id': task-1769473, 'name': PowerOffVM_Task, 'duration_secs': 0.301366} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1888.763176] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-746ef766-83c2-4cb1-b4a6-e64b71c42dc1 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1888.763359] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-746ef766-83c2-4cb1-b4a6-e64b71c42dc1 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1888.763631] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f1a846b2-4171-4d9b-a399-a3f458b893b6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.767975] env[62510]: DEBUG nova.network.neutron [-] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1888.829390] env[62510]: DEBUG nova.network.neutron [req-7fcad058-b81d-4a41-9b6f-2d934a5750ef req-7ac48c87-3b33-4ec6-b8b7-be0b06fa746a service nova] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Updated VIF entry in instance network info cache for port 022a0379-8a0f-412f-a55a-f8fcaf1102f3. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1888.829741] env[62510]: DEBUG nova.network.neutron [req-7fcad058-b81d-4a41-9b6f-2d934a5750ef req-7ac48c87-3b33-4ec6-b8b7-be0b06fa746a service nova] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Updating instance_info_cache with network_info: [{"id": "022a0379-8a0f-412f-a55a-f8fcaf1102f3", "address": "fa:16:3e:fc:f0:87", "network": {"id": "3958d418-1b64-4598-975c-02b13c976ce5", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1692593298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3109fa7889c64dfda2117d4cd58aa528", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap022a0379-8a", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1888.877693] env[62510]: DEBUG nova.network.neutron [None req-aed5d563-dd11-46d2-960f-56585fe49098 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1889.022611] env[62510]: INFO nova.compute.manager [None 
req-d848be77-ea11-4cb2-9dba-9315a55955ad tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Detaching volume b844eb5c-7644-4bb5-900e-d0a16620fbe8 [ 1889.041400] env[62510]: DEBUG nova.compute.manager [req-0433a459-ca88-4503-bdeb-6de28e2296ad req-47febc4f-6649-437a-9def-b2d43661d381 service nova] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Received event network-changed-53f991bc-ec2f-434b-8943-f8e6d891b608 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1889.041599] env[62510]: DEBUG nova.compute.manager [req-0433a459-ca88-4503-bdeb-6de28e2296ad req-47febc4f-6649-437a-9def-b2d43661d381 service nova] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Refreshing instance network info cache due to event network-changed-53f991bc-ec2f-434b-8943-f8e6d891b608. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1889.041791] env[62510]: DEBUG oslo_concurrency.lockutils [req-0433a459-ca88-4503-bdeb-6de28e2296ad req-47febc4f-6649-437a-9def-b2d43661d381 service nova] Acquiring lock "refresh_cache-01204162-bf8e-46e0-bcf4-00df9ed7e7ce" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1889.054925] env[62510]: INFO nova.virt.block_device [None req-d848be77-ea11-4cb2-9dba-9315a55955ad tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Attempting to driver detach volume b844eb5c-7644-4bb5-900e-d0a16620fbe8 from mountpoint /dev/sdb [ 1889.055323] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-d848be77-ea11-4cb2-9dba-9315a55955ad tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Volume detach. 
Driver type: vmdk {{(pid=62510) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1889.055608] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-d848be77-ea11-4cb2-9dba-9315a55955ad tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367443', 'volume_id': 'b844eb5c-7644-4bb5-900e-d0a16620fbe8', 'name': 'volume-b844eb5c-7644-4bb5-900e-d0a16620fbe8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bc474f8b-dd3b-4d7a-a8e0-fea5570b3091', 'attached_at': '', 'detached_at': '', 'volume_id': 'b844eb5c-7644-4bb5-900e-d0a16620fbe8', 'serial': 'b844eb5c-7644-4bb5-900e-d0a16620fbe8'} {{(pid=62510) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1889.056964] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7f1559e-4386-4e91-b2b7-6b50b9c0d7c2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.082168] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94a59041-81bd-4fd0-a75c-c4a68014b5d8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.089736] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9150d612-ac3c-4d2c-985e-ca0a9e511655 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.112226] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b03180e-41a0-49ce-b6ad-1049c441d8d5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.129890] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-d848be77-ea11-4cb2-9dba-9315a55955ad tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] The volume has not been displaced from its original location: [datastore1] volume-b844eb5c-7644-4bb5-900e-d0a16620fbe8/volume-b844eb5c-7644-4bb5-900e-d0a16620fbe8.vmdk. No consolidation needed. 
{{(pid=62510) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1889.135494] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-d848be77-ea11-4cb2-9dba-9315a55955ad tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Reconfiguring VM instance instance-0000004e to detach disk 2001 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1889.135820] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5a0566a0-36e9-4087-be04-11f529f11354 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.155025] env[62510]: DEBUG oslo_vmware.api [None req-d848be77-ea11-4cb2-9dba-9315a55955ad tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1889.155025] env[62510]: value = "task-1769476" [ 1889.155025] env[62510]: _type = "Task" [ 1889.155025] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1889.164024] env[62510]: DEBUG oslo_vmware.api [None req-d848be77-ea11-4cb2-9dba-9315a55955ad tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769476, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.217484] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7fbe06b1-fc61-4348-b608-dc3f70ea944c tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.955s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1889.219939] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6b9f057d-9ef6-4ad9-a0a9-a4f712e8260b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 2.395s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1889.231615] env[62510]: DEBUG oslo_vmware.api [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769474, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07202} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1889.232056] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1889.233131] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bec059b-5c1d-4381-87ca-494d9551c2d9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.259029] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] 72f8492b-304a-4451-ab40-4cdfe36b9e19/72f8492b-304a-4451-ab40-4cdfe36b9e19.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1889.260272] env[62510]: INFO nova.scheduler.client.report [None req-7fbe06b1-fc61-4348-b608-dc3f70ea944c tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Deleted allocations for instance 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c [ 1889.261438] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6d4ea9a5-3b6e-440b-9997-4a4aeff4556e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.279022] env[62510]: INFO nova.compute.manager [-] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Took 1.30 seconds to deallocate network for instance. [ 1889.284491] env[62510]: DEBUG nova.compute.manager [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1889.289304] env[62510]: DEBUG oslo_vmware.api [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1889.289304] env[62510]: value = "task-1769477" [ 1889.289304] env[62510]: _type = "Task" [ 1889.289304] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1889.291226] env[62510]: DEBUG nova.compute.manager [req-d4c27d17-6d66-4408-b053-bf10a04d86f1 req-d126109f-4601-4b1e-840a-87f0a1a6015e service nova] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Received event network-vif-deleted-b0fce605-e00b-4356-8005-b66dcb30663b {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1889.303222] env[62510]: DEBUG oslo_vmware.api [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769477, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.315245] env[62510]: DEBUG nova.virt.hardware [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1889.315515] env[62510]: DEBUG nova.virt.hardware [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1889.317020] env[62510]: DEBUG nova.virt.hardware [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1889.317020] env[62510]: DEBUG nova.virt.hardware [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1889.317020] env[62510]: DEBUG nova.virt.hardware [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1889.317020] env[62510]: DEBUG nova.virt.hardware [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1889.317020] env[62510]: DEBUG nova.virt.hardware [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1889.317871] env[62510]: DEBUG nova.virt.hardware [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1889.317871] env[62510]: DEBUG nova.virt.hardware [None 
req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1889.317871] env[62510]: DEBUG nova.virt.hardware [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1889.317871] env[62510]: DEBUG nova.virt.hardware [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1889.319029] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cc7fe6a-ee39-46ff-bbd4-1ea1c07b9045 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.328029] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3950bc64-7b5c-4cb6-8bc1-e80ffc0be26e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.334437] env[62510]: DEBUG oslo_concurrency.lockutils [req-7fcad058-b81d-4a41-9b6f-2d934a5750ef req-7ac48c87-3b33-4ec6-b8b7-be0b06fa746a service nova] Releasing lock "refresh_cache-cf4160a8-1160-45fc-b9e5-e9526b6c1506" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1889.526181] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ef9f2013-d70c-49cf-892e-9f1268b61524 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "c2be17de-175a-401f-8c53-f785aeecfff4" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1889.526332] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ef9f2013-d70c-49cf-892e-9f1268b61524 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "c2be17de-175a-401f-8c53-f785aeecfff4" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1889.526518] env[62510]: DEBUG nova.compute.manager [None req-ef9f2013-d70c-49cf-892e-9f1268b61524 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1889.529866] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4075ebc2-0b2b-4505-8a14-2fafc4e0cc26 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.537413] env[62510]: DEBUG nova.compute.manager [None req-ef9f2013-d70c-49cf-892e-9f1268b61524 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Stopping instance; 
current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62510) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1889.538120] env[62510]: DEBUG nova.objects.instance [None req-ef9f2013-d70c-49cf-892e-9f1268b61524 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lazy-loading 'flavor' on Instance uuid c2be17de-175a-401f-8c53-f785aeecfff4 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1889.661009] env[62510]: DEBUG nova.network.neutron [None req-aed5d563-dd11-46d2-960f-56585fe49098 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Updating instance_info_cache with network_info: [{"id": "53f991bc-ec2f-434b-8943-f8e6d891b608", "address": "fa:16:3e:0e:3a:9b", "network": {"id": "7d511bb5-50cd-4a86-94d2-efb9fbf27e48", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-70351339-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.191", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "69511bceaf9c432c8819574d05584f09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53f991bc-ec", "ovs_interfaceid": "53f991bc-ec2f-434b-8943-f8e6d891b608", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1889.669137] env[62510]: DEBUG oslo_vmware.api [None req-d848be77-ea11-4cb2-9dba-9315a55955ad tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769476, 'name': ReconfigVM_Task, 'duration_secs': 0.241143} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1889.669511] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-d848be77-ea11-4cb2-9dba-9315a55955ad tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Reconfigured VM instance instance-0000004e to detach disk 2001 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1889.675484] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-19e55b95-117d-4ea6-ada1-ab35d7903242 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.693796] env[62510]: DEBUG oslo_vmware.api [None req-d848be77-ea11-4cb2-9dba-9315a55955ad tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1889.693796] env[62510]: value = "task-1769478" [ 1889.693796] env[62510]: _type = "Task" [ 1889.693796] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1889.704207] env[62510]: DEBUG oslo_vmware.api [None req-d848be77-ea11-4cb2-9dba-9315a55955ad tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769478, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.783445] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7fbe06b1-fc61-4348-b608-dc3f70ea944c tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Lock "5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.722s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1889.790315] env[62510]: DEBUG oslo_concurrency.lockutils [None req-08008098-e99e-48d0-b37a-a5e1d914f75b tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1889.800645] env[62510]: DEBUG oslo_vmware.api [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769477, 'name': ReconfigVM_Task, 'duration_secs': 0.348357} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1889.803363] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Reconfigured VM instance instance-0000006a to attach disk [datastore1] 72f8492b-304a-4451-ab40-4cdfe36b9e19/72f8492b-304a-4451-ab40-4cdfe36b9e19.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1889.804760] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cf3be3a7-a331-4ae7-8d78-026485a59b00 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.813086] env[62510]: DEBUG oslo_vmware.api [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1889.813086] env[62510]: value = "task-1769479" [ 1889.813086] env[62510]: _type = "Task" [ 1889.813086] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1889.826331] env[62510]: DEBUG oslo_vmware.api [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769479, 'name': Rename_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.995071] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ded5764-b533-442b-bcdc-a6aa98d67196 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.004149] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b9dc867-1fa3-4b17-b5b9-78dece79be1a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.037860] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd7ee3e1-d44c-4019-902c-9025272992e5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.047381] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-967f9fbb-97ff-47b0-b792-705ae4934145 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.063751] env[62510]: DEBUG nova.compute.provider_tree [None req-6b9f057d-9ef6-4ad9-a0a9-a4f712e8260b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1890.163670] env[62510]: DEBUG oslo_concurrency.lockutils [None req-aed5d563-dd11-46d2-960f-56585fe49098 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Releasing lock "refresh_cache-01204162-bf8e-46e0-bcf4-00df9ed7e7ce" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1890.163941] env[62510]: DEBUG nova.compute.manager [None req-aed5d563-dd11-46d2-960f-56585fe49098 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Inject network info {{(pid=62510) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7600}} [ 1890.164215] env[62510]: DEBUG nova.compute.manager [None req-aed5d563-dd11-46d2-960f-56585fe49098 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] network_info to inject: |[{"id": "53f991bc-ec2f-434b-8943-f8e6d891b608", "address": "fa:16:3e:0e:3a:9b", "network": {"id": "7d511bb5-50cd-4a86-94d2-efb9fbf27e48", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-70351339-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.191", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "69511bceaf9c432c8819574d05584f09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53f991bc-ec", "ovs_interfaceid": "53f991bc-ec2f-434b-8943-f8e6d891b608", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7601}} [ 1890.169097] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-aed5d563-dd11-46d2-960f-56585fe49098 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Reconfiguring VM instance to set the machine id {{(pid=62510) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1890.169376] env[62510]: DEBUG oslo_concurrency.lockutils [req-0433a459-ca88-4503-bdeb-6de28e2296ad req-47febc4f-6649-437a-9def-b2d43661d381 service nova] Acquired lock "refresh_cache-01204162-bf8e-46e0-bcf4-00df9ed7e7ce" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1890.169558] env[62510]: DEBUG nova.network.neutron [req-0433a459-ca88-4503-bdeb-6de28e2296ad req-47febc4f-6649-437a-9def-b2d43661d381 service nova] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Refreshing network info cache for port 53f991bc-ec2f-434b-8943-f8e6d891b608 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1890.171174] 
env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-27123ae2-3acd-40c1-a9a4-a6b4b0c1049c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.185867] env[62510]: DEBUG nova.compute.manager [req-ae244084-37d4-45ca-8d33-048f73aa8b50 req-261b06c5-4d0c-460e-b453-6d5f312a1ab5 service nova] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Received event network-vif-plugged-aa1b717d-79b9-457c-829a-a4e12f0187c4 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1890.186080] env[62510]: DEBUG oslo_concurrency.lockutils [req-ae244084-37d4-45ca-8d33-048f73aa8b50 req-261b06c5-4d0c-460e-b453-6d5f312a1ab5 service nova] Acquiring lock "9956e5d2-edda-47af-a3df-743ebed1154b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1890.186413] env[62510]: DEBUG oslo_concurrency.lockutils [req-ae244084-37d4-45ca-8d33-048f73aa8b50 req-261b06c5-4d0c-460e-b453-6d5f312a1ab5 service nova] Lock "9956e5d2-edda-47af-a3df-743ebed1154b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1890.186586] env[62510]: DEBUG oslo_concurrency.lockutils [req-ae244084-37d4-45ca-8d33-048f73aa8b50 req-261b06c5-4d0c-460e-b453-6d5f312a1ab5 service nova] Lock "9956e5d2-edda-47af-a3df-743ebed1154b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1890.186757] env[62510]: DEBUG nova.compute.manager [req-ae244084-37d4-45ca-8d33-048f73aa8b50 req-261b06c5-4d0c-460e-b453-6d5f312a1ab5 service nova] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] No waiting events found dispatching network-vif-plugged-aa1b717d-79b9-457c-829a-a4e12f0187c4 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1890.186923] env[62510]: WARNING nova.compute.manager [req-ae244084-37d4-45ca-8d33-048f73aa8b50 req-261b06c5-4d0c-460e-b453-6d5f312a1ab5 service nova] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Received unexpected event network-vif-plugged-aa1b717d-79b9-457c-829a-a4e12f0187c4 for instance with vm_state building and task_state spawning. [ 1890.194609] env[62510]: DEBUG oslo_vmware.api [None req-aed5d563-dd11-46d2-960f-56585fe49098 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Waiting for the task: (returnval){ [ 1890.194609] env[62510]: value = "task-1769480" [ 1890.194609] env[62510]: _type = "Task" [ 1890.194609] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1890.208403] env[62510]: DEBUG oslo_vmware.api [None req-d848be77-ea11-4cb2-9dba-9315a55955ad tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769478, 'name': ReconfigVM_Task, 'duration_secs': 0.163833} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1890.212403] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-d848be77-ea11-4cb2-9dba-9315a55955ad tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367443', 'volume_id': 'b844eb5c-7644-4bb5-900e-d0a16620fbe8', 'name': 'volume-b844eb5c-7644-4bb5-900e-d0a16620fbe8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'bc474f8b-dd3b-4d7a-a8e0-fea5570b3091', 'attached_at': '', 'detached_at': '', 'volume_id': 'b844eb5c-7644-4bb5-900e-d0a16620fbe8', 'serial': 'b844eb5c-7644-4bb5-900e-d0a16620fbe8'} {{(pid=62510) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1890.214741] env[62510]: DEBUG oslo_vmware.api [None req-aed5d563-dd11-46d2-960f-56585fe49098 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Task: {'id': task-1769480, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.222493] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquiring lock "cf4160a8-1160-45fc-b9e5-e9526b6c1506" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1890.328384] env[62510]: DEBUG oslo_vmware.api [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769479, 'name': Rename_Task, 'duration_secs': 0.173375} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1890.328671] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1890.328916] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1dcc1a3e-2333-47a9-ab0c-72837ef0fb4b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.335383] env[62510]: DEBUG oslo_vmware.api [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1890.335383] env[62510]: value = "task-1769481" [ 1890.335383] env[62510]: _type = "Task" [ 1890.335383] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1890.343930] env[62510]: DEBUG oslo_vmware.api [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769481, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.547444] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef9f2013-d70c-49cf-892e-9f1268b61524 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1890.547794] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-932a3ab9-c3a0-4a14-914c-1ce1e21d977f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.555272] env[62510]: DEBUG oslo_vmware.api [None req-ef9f2013-d70c-49cf-892e-9f1268b61524 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1890.555272] env[62510]: value = "task-1769482" [ 1890.555272] env[62510]: _type = "Task" [ 1890.555272] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1890.566780] env[62510]: DEBUG oslo_vmware.api [None req-ef9f2013-d70c-49cf-892e-9f1268b61524 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769482, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.589515] env[62510]: ERROR nova.scheduler.client.report [None req-6b9f057d-9ef6-4ad9-a0a9-a4f712e8260b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [req-40ac6429-91b4-49e9-b315-f7958ba11a71] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c3653102-341b-4ed1-8b1f-1abaf8aa3e56. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-40ac6429-91b4-49e9-b315-f7958ba11a71"}]} [ 1890.613992] env[62510]: DEBUG nova.scheduler.client.report [None req-6b9f057d-9ef6-4ad9-a0a9-a4f712e8260b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Refreshing inventories for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1890.628738] env[62510]: DEBUG nova.scheduler.client.report [None req-6b9f057d-9ef6-4ad9-a0a9-a4f712e8260b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Updating ProviderTree inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1890.629074] env[62510]: DEBUG nova.compute.provider_tree [None req-6b9f057d-9ef6-4ad9-a0a9-a4f712e8260b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1890.640700] env[62510]: DEBUG nova.scheduler.client.report [None req-6b9f057d-9ef6-4ad9-a0a9-a4f712e8260b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Refreshing aggregate associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, aggregates: None {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1890.660992] env[62510]: DEBUG nova.scheduler.client.report [None req-6b9f057d-9ef6-4ad9-a0a9-a4f712e8260b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Refreshing trait associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1890.714278] env[62510]: DEBUG oslo_vmware.api [None req-aed5d563-dd11-46d2-960f-56585fe49098 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Task: {'id': task-1769480, 'name': ReconfigVM_Task, 'duration_secs': 0.136058} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1890.714558] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-aed5d563-dd11-46d2-960f-56585fe49098 tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Reconfigured VM instance to set the machine id {{(pid=62510) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1890.775052] env[62510]: DEBUG nova.objects.instance [None req-d848be77-ea11-4cb2-9dba-9315a55955ad tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lazy-loading 'flavor' on Instance uuid bc474f8b-dd3b-4d7a-a8e0-fea5570b3091 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1890.847996] env[62510]: DEBUG oslo_vmware.api [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769481, 'name': PowerOnVM_Task, 'duration_secs': 0.482065} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1890.848280] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1890.848504] env[62510]: INFO nova.compute.manager [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Took 9.17 seconds to spawn the instance on the hypervisor. [ 1890.848688] env[62510]: DEBUG nova.compute.manager [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1890.849487] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21c03273-8552-4d8f-827b-81ab2a77a9bb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.866783] env[62510]: DEBUG nova.network.neutron [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Successfully updated port: aa1b717d-79b9-457c-829a-a4e12f0187c4 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1890.890913] env[62510]: DEBUG nova.network.neutron [req-0433a459-ca88-4503-bdeb-6de28e2296ad req-47febc4f-6649-437a-9def-b2d43661d381 service nova] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Updated VIF entry in instance network info cache for port 53f991bc-ec2f-434b-8943-f8e6d891b608. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1890.891291] env[62510]: DEBUG nova.network.neutron [req-0433a459-ca88-4503-bdeb-6de28e2296ad req-47febc4f-6649-437a-9def-b2d43661d381 service nova] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Updating instance_info_cache with network_info: [{"id": "53f991bc-ec2f-434b-8943-f8e6d891b608", "address": "fa:16:3e:0e:3a:9b", "network": {"id": "7d511bb5-50cd-4a86-94d2-efb9fbf27e48", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-70351339-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.191", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "69511bceaf9c432c8819574d05584f09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53f991bc-ec", "ovs_interfaceid": "53f991bc-ec2f-434b-8943-f8e6d891b608", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1890.986013] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56fcae3c-c2ff-46ff-b521-95085f50fa90 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.994435] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d570971a-da63-45a8-b682-73fd873b344d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.025977] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b5677fb-06cd-48ce-9362-bb915a5949ec {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.034560] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcf12f11-9095-4cee-8da4-57b9bea4f69c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.040482] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-746ef766-83c2-4cb1-b4a6-e64b71c42dc1 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1891.040699] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-746ef766-83c2-4cb1-b4a6-e64b71c42dc1 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1891.040885] 
env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-746ef766-83c2-4cb1-b4a6-e64b71c42dc1 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Deleting the datastore file [datastore1] 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1891.041151] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bf5843b7-c645-44a6-ad56-7a70ef5cacbc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.051046] env[62510]: DEBUG nova.compute.provider_tree [None req-6b9f057d-9ef6-4ad9-a0a9-a4f712e8260b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1891.060162] env[62510]: DEBUG oslo_vmware.api [None req-746ef766-83c2-4cb1-b4a6-e64b71c42dc1 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Waiting for the task: (returnval){ [ 1891.060162] env[62510]: value = "task-1769483" [ 1891.060162] env[62510]: _type = "Task" [ 1891.060162] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1891.068110] env[62510]: DEBUG oslo_vmware.api [None req-ef9f2013-d70c-49cf-892e-9f1268b61524 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769482, 'name': PowerOffVM_Task, 'duration_secs': 0.197966} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1891.068725] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef9f2013-d70c-49cf-892e-9f1268b61524 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1891.068941] env[62510]: DEBUG nova.compute.manager [None req-ef9f2013-d70c-49cf-892e-9f1268b61524 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1891.069732] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f7eac9e-3483-4abe-864d-2803b56e14fb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.076270] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5cfa5d37-e9cf-40ef-a6b5-2e6f13689f6a tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Acquiring lock "01204162-bf8e-46e0-bcf4-00df9ed7e7ce" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1891.076521] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5cfa5d37-e9cf-40ef-a6b5-2e6f13689f6a tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Lock "01204162-bf8e-46e0-bcf4-00df9ed7e7ce" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1891.076817] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5cfa5d37-e9cf-40ef-a6b5-2e6f13689f6a tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Acquiring lock "01204162-bf8e-46e0-bcf4-00df9ed7e7ce-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1891.077086] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5cfa5d37-e9cf-40ef-a6b5-2e6f13689f6a tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Lock "01204162-bf8e-46e0-bcf4-00df9ed7e7ce-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1891.077283] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5cfa5d37-e9cf-40ef-a6b5-2e6f13689f6a tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Lock "01204162-bf8e-46e0-bcf4-00df9ed7e7ce-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1891.079175] env[62510]: DEBUG oslo_vmware.api [None req-746ef766-83c2-4cb1-b4a6-e64b71c42dc1 
tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Task: {'id': task-1769483, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.080265] env[62510]: DEBUG nova.compute.manager [req-633ec329-6798-447b-a20c-b15535b6268b req-2ecf5149-20d3-4fd1-a4e9-11fb68024467 service nova] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Received event network-changed-aa1b717d-79b9-457c-829a-a4e12f0187c4 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1891.080469] env[62510]: DEBUG nova.compute.manager [req-633ec329-6798-447b-a20c-b15535b6268b req-2ecf5149-20d3-4fd1-a4e9-11fb68024467 service nova] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Refreshing instance network info cache due to event network-changed-aa1b717d-79b9-457c-829a-a4e12f0187c4. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1891.080688] env[62510]: DEBUG oslo_concurrency.lockutils [req-633ec329-6798-447b-a20c-b15535b6268b req-2ecf5149-20d3-4fd1-a4e9-11fb68024467 service nova] Acquiring lock "refresh_cache-9956e5d2-edda-47af-a3df-743ebed1154b" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1891.080860] env[62510]: DEBUG oslo_concurrency.lockutils [req-633ec329-6798-447b-a20c-b15535b6268b req-2ecf5149-20d3-4fd1-a4e9-11fb68024467 service nova] Acquired lock "refresh_cache-9956e5d2-edda-47af-a3df-743ebed1154b" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1891.081142] env[62510]: DEBUG nova.network.neutron [req-633ec329-6798-447b-a20c-b15535b6268b req-2ecf5149-20d3-4fd1-a4e9-11fb68024467 service nova] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Refreshing network info cache for port aa1b717d-79b9-457c-829a-a4e12f0187c4 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1891.082192] env[62510]: INFO nova.compute.manager [None req-5cfa5d37-e9cf-40ef-a6b5-2e6f13689f6a tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Terminating instance [ 1891.372336] env[62510]: DEBUG oslo_concurrency.lockutils [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "refresh_cache-9956e5d2-edda-47af-a3df-743ebed1154b" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1891.375073] env[62510]: INFO nova.compute.manager [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Took 17.27 seconds to build instance. 
[ 1891.393993] env[62510]: DEBUG oslo_concurrency.lockutils [req-0433a459-ca88-4503-bdeb-6de28e2296ad req-47febc4f-6649-437a-9def-b2d43661d381 service nova] Releasing lock "refresh_cache-01204162-bf8e-46e0-bcf4-00df9ed7e7ce" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1891.571043] env[62510]: DEBUG oslo_vmware.api [None req-746ef766-83c2-4cb1-b4a6-e64b71c42dc1 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Task: {'id': task-1769483, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.215612} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1891.571495] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-746ef766-83c2-4cb1-b4a6-e64b71c42dc1 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1891.571662] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-746ef766-83c2-4cb1-b4a6-e64b71c42dc1 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1891.571835] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-746ef766-83c2-4cb1-b4a6-e64b71c42dc1 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1891.572029] env[62510]: INFO nova.compute.manager [None req-746ef766-83c2-4cb1-b4a6-e64b71c42dc1 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Took 3.34 seconds to destroy the instance on the hypervisor. [ 1891.572288] env[62510]: DEBUG oslo.service.loopingcall [None req-746ef766-83c2-4cb1-b4a6-e64b71c42dc1 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1891.572503] env[62510]: DEBUG nova.compute.manager [-] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1891.572605] env[62510]: DEBUG nova.network.neutron [-] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1891.589459] env[62510]: DEBUG nova.compute.manager [None req-5cfa5d37-e9cf-40ef-a6b5-2e6f13689f6a tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1891.589708] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5cfa5d37-e9cf-40ef-a6b5-2e6f13689f6a tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1891.593036] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26c9a9e4-9cd4-43be-b472-4ea1487dd0f5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.596879] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ef9f2013-d70c-49cf-892e-9f1268b61524 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "c2be17de-175a-401f-8c53-f785aeecfff4" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.070s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1891.598804] env[62510]: DEBUG nova.scheduler.client.report [None req-6b9f057d-9ef6-4ad9-a0a9-a4f712e8260b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Updated inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with generation 142 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1891.599108] env[62510]: DEBUG nova.compute.provider_tree [None req-6b9f057d-9ef6-4ad9-a0a9-a4f712e8260b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Updating resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 generation from 142 to 143 during operation: update_inventory {{(pid=62510) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1891.599344] env[62510]: DEBUG nova.compute.provider_tree [None req-6b9f057d-9ef6-4ad9-a0a9-a4f712e8260b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1891.610675] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cfa5d37-e9cf-40ef-a6b5-2e6f13689f6a tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1891.611797] env[62510]: DEBUG oslo_vmware.service [-] 
Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-17a0498e-b307-45fd-b44b-b072db385db2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.624427] env[62510]: DEBUG oslo_vmware.api [None req-5cfa5d37-e9cf-40ef-a6b5-2e6f13689f6a tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Waiting for the task: (returnval){ [ 1891.624427] env[62510]: value = "task-1769484" [ 1891.624427] env[62510]: _type = "Task" [ 1891.624427] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1891.635547] env[62510]: DEBUG oslo_vmware.api [None req-5cfa5d37-e9cf-40ef-a6b5-2e6f13689f6a tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Task: {'id': task-1769484, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.667313] env[62510]: DEBUG nova.network.neutron [req-633ec329-6798-447b-a20c-b15535b6268b req-2ecf5149-20d3-4fd1-a4e9-11fb68024467 service nova] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1891.783154] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d848be77-ea11-4cb2-9dba-9315a55955ad tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "bc474f8b-dd3b-4d7a-a8e0-fea5570b3091" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.265s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1891.877895] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c4e33127-2ffb-40cf-810e-61714a886033 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "72f8492b-304a-4451-ab40-4cdfe36b9e19" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.799s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1891.924289] env[62510]: DEBUG nova.network.neutron [req-633ec329-6798-447b-a20c-b15535b6268b req-2ecf5149-20d3-4fd1-a4e9-11fb68024467 service nova] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1892.125433] env[62510]: DEBUG oslo_concurrency.lockutils [None req-465a17e7-03d1-4b84-a175-c7fb688fd5ff tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "bc474f8b-dd3b-4d7a-a8e0-fea5570b3091" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.125752] env[62510]: DEBUG oslo_concurrency.lockutils [None req-465a17e7-03d1-4b84-a175-c7fb688fd5ff tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "bc474f8b-dd3b-4d7a-a8e0-fea5570b3091" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.125969] env[62510]: DEBUG oslo_concurrency.lockutils [None req-465a17e7-03d1-4b84-a175-c7fb688fd5ff tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "bc474f8b-dd3b-4d7a-a8e0-fea5570b3091-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.126184] env[62510]: DEBUG oslo_concurrency.lockutils [None req-465a17e7-03d1-4b84-a175-c7fb688fd5ff tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "bc474f8b-dd3b-4d7a-a8e0-fea5570b3091-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.126355] env[62510]: DEBUG oslo_concurrency.lockutils [None req-465a17e7-03d1-4b84-a175-c7fb688fd5ff tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "bc474f8b-dd3b-4d7a-a8e0-fea5570b3091-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1892.128747] env[62510]: INFO nova.compute.manager [None req-465a17e7-03d1-4b84-a175-c7fb688fd5ff tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Terminating instance [ 1892.141010] env[62510]: DEBUG oslo_vmware.api [None req-5cfa5d37-e9cf-40ef-a6b5-2e6f13689f6a tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Task: {'id': task-1769484, 'name': PowerOffVM_Task, 'duration_secs': 0.263509} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1892.141758] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cfa5d37-e9cf-40ef-a6b5-2e6f13689f6a tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1892.141941] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5cfa5d37-e9cf-40ef-a6b5-2e6f13689f6a tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1892.142201] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d428f858-70c4-4ffb-a78d-c4395e26545c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.427373] env[62510]: DEBUG oslo_concurrency.lockutils [req-633ec329-6798-447b-a20c-b15535b6268b req-2ecf5149-20d3-4fd1-a4e9-11fb68024467 service nova] Releasing lock "refresh_cache-9956e5d2-edda-47af-a3df-743ebed1154b" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1892.427834] env[62510]: DEBUG oslo_concurrency.lockutils [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquired lock "refresh_cache-9956e5d2-edda-47af-a3df-743ebed1154b" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1892.427996] env[62510]: DEBUG nova.network.neutron [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1892.560281] env[62510]: DEBUG oslo_concurrency.lockutils [None req-303c9717-c050-45a8-94c4-b60329b99bd8 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquiring lock "5f229f78-6c5d-4170-bdd4-c5522b137949" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.560586] env[62510]: DEBUG oslo_concurrency.lockutils [None req-303c9717-c050-45a8-94c4-b60329b99bd8 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lock "5f229f78-6c5d-4170-bdd4-c5522b137949" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.565018] env[62510]: DEBUG nova.network.neutron [-] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1892.565018] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ad82c335-a51c-423c-8374-d0b22ea046f3 tempest-ServersTestJSON-938961669 
tempest-ServersTestJSON-938961669-project-member] Acquiring lock "c2be17de-175a-401f-8c53-f785aeecfff4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.565018] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ad82c335-a51c-423c-8374-d0b22ea046f3 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "c2be17de-175a-401f-8c53-f785aeecfff4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.565018] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ad82c335-a51c-423c-8374-d0b22ea046f3 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "c2be17de-175a-401f-8c53-f785aeecfff4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.565018] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ad82c335-a51c-423c-8374-d0b22ea046f3 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "c2be17de-175a-401f-8c53-f785aeecfff4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.565018] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ad82c335-a51c-423c-8374-d0b22ea046f3 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "c2be17de-175a-401f-8c53-f785aeecfff4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1892.568810] env[62510]: INFO nova.compute.manager [None req-ad82c335-a51c-423c-8374-d0b22ea046f3 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Terminating instance [ 1892.611046] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6b9f057d-9ef6-4ad9-a0a9-a4f712e8260b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 3.391s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1892.611344] env[62510]: DEBUG nova.compute.manager [None req-6b9f057d-9ef6-4ad9-a0a9-a4f712e8260b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. 
{{(pid=62510) _confirm_resize /opt/stack/nova/nova/compute/manager.py:5283}} [ 1892.614045] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.766s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.614283] env[62510]: DEBUG nova.objects.instance [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Lazy-loading 'resources' on Instance uuid 82dceacf-1898-4d86-b1c6-552a24ab565f {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1892.621823] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5cfa5d37-e9cf-40ef-a6b5-2e6f13689f6a tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1892.622260] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5cfa5d37-e9cf-40ef-a6b5-2e6f13689f6a tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1892.622260] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cfa5d37-e9cf-40ef-a6b5-2e6f13689f6a tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Deleting the datastore file [datastore1] 01204162-bf8e-46e0-bcf4-00df9ed7e7ce {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1892.623065] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5be504d2-12cb-431a-abd6-f1ad3248fa95 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.631120] env[62510]: DEBUG oslo_vmware.api [None req-5cfa5d37-e9cf-40ef-a6b5-2e6f13689f6a tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Waiting for the task: (returnval){ [ 1892.631120] env[62510]: value = "task-1769486" [ 1892.631120] env[62510]: _type = "Task" [ 1892.631120] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1892.635896] env[62510]: DEBUG nova.compute.manager [None req-465a17e7-03d1-4b84-a175-c7fb688fd5ff tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1892.636388] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-465a17e7-03d1-4b84-a175-c7fb688fd5ff tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1892.637267] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4462cacd-c743-4a0f-835f-bd2f2689dcc3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.648063] env[62510]: DEBUG oslo_vmware.api [None req-5cfa5d37-e9cf-40ef-a6b5-2e6f13689f6a tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Task: {'id': task-1769486, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.651289] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-465a17e7-03d1-4b84-a175-c7fb688fd5ff tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1892.651289] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2bdf81db-5909-49d6-b3b0-591dbc0a775e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.659022] env[62510]: DEBUG oslo_vmware.api [None req-465a17e7-03d1-4b84-a175-c7fb688fd5ff tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1892.659022] env[62510]: value = "task-1769487" [ 1892.659022] env[62510]: _type = "Task" [ 1892.659022] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1892.669488] env[62510]: DEBUG oslo_vmware.api [None req-465a17e7-03d1-4b84-a175-c7fb688fd5ff tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769487, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.986395] env[62510]: DEBUG nova.network.neutron [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Instance cache missing network info. 
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1893.012179] env[62510]: DEBUG nova.compute.manager [req-7b71ba22-7593-49da-b6d0-9d8877637a34 req-2b9d8724-2173-4f8d-834c-1dd3fe04726a service nova] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Received event network-changed-f1d12594-5d5a-4965-a017-3b055a432283 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1893.012179] env[62510]: DEBUG nova.compute.manager [req-7b71ba22-7593-49da-b6d0-9d8877637a34 req-2b9d8724-2173-4f8d-834c-1dd3fe04726a service nova] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Refreshing instance network info cache due to event network-changed-f1d12594-5d5a-4965-a017-3b055a432283. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1893.012398] env[62510]: DEBUG oslo_concurrency.lockutils [req-7b71ba22-7593-49da-b6d0-9d8877637a34 req-2b9d8724-2173-4f8d-834c-1dd3fe04726a service nova] Acquiring lock "refresh_cache-0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1893.012398] env[62510]: DEBUG oslo_concurrency.lockutils [req-7b71ba22-7593-49da-b6d0-9d8877637a34 req-2b9d8724-2173-4f8d-834c-1dd3fe04726a service nova] Acquired lock "refresh_cache-0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1893.012550] env[62510]: DEBUG nova.network.neutron [req-7b71ba22-7593-49da-b6d0-9d8877637a34 req-2b9d8724-2173-4f8d-834c-1dd3fe04726a service nova] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Refreshing network info cache for port f1d12594-5d5a-4965-a017-3b055a432283 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1893.064223] env[62510]: DEBUG nova.compute.utils [None req-303c9717-c050-45a8-94c4-b60329b99bd8 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1893.066351] env[62510]: INFO nova.compute.manager [-] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Took 1.49 seconds to deallocate network for instance. [ 1893.077034] env[62510]: DEBUG nova.compute.manager [None req-ad82c335-a51c-423c-8374-d0b22ea046f3 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1893.077034] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ad82c335-a51c-423c-8374-d0b22ea046f3 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1893.077980] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00507d98-94b2-4df9-8abd-f95882cbd9a6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.087812] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ad82c335-a51c-423c-8374-d0b22ea046f3 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1893.088448] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ea87f783-f5ca-49d9-a511-a3045fe4ad7c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.145502] env[62510]: DEBUG oslo_vmware.api [None req-5cfa5d37-e9cf-40ef-a6b5-2e6f13689f6a tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Task: {'id': task-1769486, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.292805} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1893.148505] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cfa5d37-e9cf-40ef-a6b5-2e6f13689f6a tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1893.148854] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5cfa5d37-e9cf-40ef-a6b5-2e6f13689f6a tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1893.149980] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5cfa5d37-e9cf-40ef-a6b5-2e6f13689f6a tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1893.150202] env[62510]: INFO nova.compute.manager [None req-5cfa5d37-e9cf-40ef-a6b5-2e6f13689f6a tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Took 1.56 seconds to destroy the instance on the hypervisor. [ 1893.150448] env[62510]: DEBUG oslo.service.loopingcall [None req-5cfa5d37-e9cf-40ef-a6b5-2e6f13689f6a tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1893.153691] env[62510]: DEBUG nova.compute.manager [-] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1893.153792] env[62510]: DEBUG nova.network.neutron [-] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1893.169620] env[62510]: DEBUG oslo_vmware.api [None req-465a17e7-03d1-4b84-a175-c7fb688fd5ff tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769487, 'name': PowerOffVM_Task, 'duration_secs': 0.220848} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1893.169889] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-465a17e7-03d1-4b84-a175-c7fb688fd5ff tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1893.170535] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-465a17e7-03d1-4b84-a175-c7fb688fd5ff tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1893.170823] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a84cd58c-b0d1-4da0-b641-5d18e1f663e1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.182952] env[62510]: DEBUG nova.compute.manager [req-09dd512e-a041-458b-934e-d6e1438f922f req-4886a6a8-cfae-45c9-83d1-e5e244cbd497 service nova] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Received event network-vif-deleted-f9e5bc34-1b3a-416a-bb15-ce81423ee2a8 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1893.185331] env[62510]: INFO nova.scheduler.client.report [None req-6b9f057d-9ef6-4ad9-a0a9-a4f712e8260b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Deleted allocation for migration 8a33b4da-7f27-4c88-9c51-4bbb9a0040a7 [ 1893.226255] env[62510]: DEBUG nova.network.neutron [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Updating instance_info_cache with network_info: [{"id": "aa1b717d-79b9-457c-829a-a4e12f0187c4", "address": "fa:16:3e:31:42:53", "network": {"id": "e49618de-aacc-4b42-8a2e-7e2dc945a3b1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-883053645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b13a257970e4a9a9f9cfecaaf37d9da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, 
"type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa1b717d-79", "ovs_interfaceid": "aa1b717d-79b9-457c-829a-a4e12f0187c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1893.247170] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ad82c335-a51c-423c-8374-d0b22ea046f3 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1893.247425] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ad82c335-a51c-423c-8374-d0b22ea046f3 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1893.247614] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad82c335-a51c-423c-8374-d0b22ea046f3 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Deleting the datastore file [datastore1] c2be17de-175a-401f-8c53-f785aeecfff4 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1893.247951] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e9d8f65d-5dbc-4831-9729-e5085d4bc84a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.258266] env[62510]: DEBUG oslo_vmware.api [None req-ad82c335-a51c-423c-8374-d0b22ea046f3 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1893.258266] env[62510]: value = "task-1769490" [ 1893.258266] env[62510]: _type = "Task" [ 1893.258266] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1893.267348] env[62510]: DEBUG oslo_vmware.api [None req-ad82c335-a51c-423c-8374-d0b22ea046f3 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769490, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.299902] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-465a17e7-03d1-4b84-a175-c7fb688fd5ff tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1893.300164] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-465a17e7-03d1-4b84-a175-c7fb688fd5ff tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1893.300349] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-465a17e7-03d1-4b84-a175-c7fb688fd5ff tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Deleting the datastore file [datastore1] bc474f8b-dd3b-4d7a-a8e0-fea5570b3091 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1893.300621] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-13852745-120d-4322-b6ff-044c2cf531cc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.314345] env[62510]: DEBUG oslo_vmware.api [None req-465a17e7-03d1-4b84-a175-c7fb688fd5ff tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1893.314345] env[62510]: value = "task-1769491" [ 1893.314345] env[62510]: _type = "Task" [ 1893.314345] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1893.326254] env[62510]: DEBUG oslo_vmware.api [None req-465a17e7-03d1-4b84-a175-c7fb688fd5ff tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769491, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.437053] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfdac717-c65a-4506-93fb-57f59096be28 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.445553] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55f765d6-0a61-419c-9af7-db584ae5237d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.481669] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7d3dc04-eb42-445a-8fab-34efd351f323 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.489621] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7e21491-e9f7-4d4c-ab35-fb6fb917bc9e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.503612] env[62510]: DEBUG nova.compute.provider_tree [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1893.567757] env[62510]: DEBUG oslo_concurrency.lockutils [None req-303c9717-c050-45a8-94c4-b60329b99bd8 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lock "5f229f78-6c5d-4170-bdd4-c5522b137949" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1893.574015] env[62510]: DEBUG oslo_concurrency.lockutils [None req-746ef766-83c2-4cb1-b4a6-e64b71c42dc1 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1893.691882] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6b9f057d-9ef6-4ad9-a0a9-a4f712e8260b tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "841460b0-d917-44ea-88c6-0e5a3022f658" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 10.762s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1893.730634] env[62510]: DEBUG oslo_concurrency.lockutils [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Releasing lock "refresh_cache-9956e5d2-edda-47af-a3df-743ebed1154b" 
{{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1893.730634] env[62510]: DEBUG nova.compute.manager [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Instance network_info: |[{"id": "aa1b717d-79b9-457c-829a-a4e12f0187c4", "address": "fa:16:3e:31:42:53", "network": {"id": "e49618de-aacc-4b42-8a2e-7e2dc945a3b1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-883053645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b13a257970e4a9a9f9cfecaaf37d9da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa1b717d-79", "ovs_interfaceid": "aa1b717d-79b9-457c-829a-a4e12f0187c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1893.730634] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:31:42:53', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73b1ea51-8078-4169-921e-d5a224120ab4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'aa1b717d-79b9-457c-829a-a4e12f0187c4', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1893.737646] env[62510]: DEBUG oslo.service.loopingcall [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1893.738310] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1893.738651] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d02e112c-f37d-49d0-a259-734c9409feae {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.762853] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1893.762853] env[62510]: value = "task-1769492" [ 1893.762853] env[62510]: _type = "Task" [ 1893.762853] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1893.769661] env[62510]: DEBUG oslo_vmware.api [None req-ad82c335-a51c-423c-8374-d0b22ea046f3 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769490, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174646} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1893.770293] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad82c335-a51c-423c-8374-d0b22ea046f3 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1893.770489] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ad82c335-a51c-423c-8374-d0b22ea046f3 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1893.770689] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ad82c335-a51c-423c-8374-d0b22ea046f3 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1893.770867] env[62510]: INFO nova.compute.manager [None req-ad82c335-a51c-423c-8374-d0b22ea046f3 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Took 0.69 seconds to destroy the instance on the hypervisor. [ 1893.771130] env[62510]: DEBUG oslo.service.loopingcall [None req-ad82c335-a51c-423c-8374-d0b22ea046f3 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1893.771473] env[62510]: DEBUG nova.compute.manager [-] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1893.771562] env[62510]: DEBUG nova.network.neutron [-] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1893.775944] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769492, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.827811] env[62510]: DEBUG oslo_vmware.api [None req-465a17e7-03d1-4b84-a175-c7fb688fd5ff tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769491, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151809} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1893.828138] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-465a17e7-03d1-4b84-a175-c7fb688fd5ff tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1893.828336] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-465a17e7-03d1-4b84-a175-c7fb688fd5ff tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1893.828513] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-465a17e7-03d1-4b84-a175-c7fb688fd5ff tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1893.828693] env[62510]: INFO nova.compute.manager [None req-465a17e7-03d1-4b84-a175-c7fb688fd5ff tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1893.828939] env[62510]: DEBUG oslo.service.loopingcall [None req-465a17e7-03d1-4b84-a175-c7fb688fd5ff tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1893.829156] env[62510]: DEBUG nova.compute.manager [-] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1893.829246] env[62510]: DEBUG nova.network.neutron [-] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1893.983609] env[62510]: DEBUG nova.network.neutron [req-7b71ba22-7593-49da-b6d0-9d8877637a34 req-2b9d8724-2173-4f8d-834c-1dd3fe04726a service nova] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Updated VIF entry in instance network info cache for port f1d12594-5d5a-4965-a017-3b055a432283. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1893.984027] env[62510]: DEBUG nova.network.neutron [req-7b71ba22-7593-49da-b6d0-9d8877637a34 req-2b9d8724-2173-4f8d-834c-1dd3fe04726a service nova] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Updating instance_info_cache with network_info: [{"id": "f1d12594-5d5a-4965-a017-3b055a432283", "address": "fa:16:3e:e6:3b:d1", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1d12594-5d", "ovs_interfaceid": "f1d12594-5d5a-4965-a017-3b055a432283", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1894.034729] env[62510]: ERROR nova.scheduler.client.report [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [req-ab5293b6-3159-4458-abf7-dcf18d8c0a2c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c3653102-341b-4ed1-8b1f-1abaf8aa3e56. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ab5293b6-3159-4458-abf7-dcf18d8c0a2c"}]} [ 1894.058306] env[62510]: DEBUG nova.scheduler.client.report [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Refreshing inventories for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1894.081905] env[62510]: DEBUG nova.scheduler.client.report [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Updating ProviderTree inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1894.084018] env[62510]: DEBUG nova.compute.provider_tree [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1894.100020] env[62510]: DEBUG nova.scheduler.client.report [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Refreshing aggregate associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, aggregates: None {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1894.125427] env[62510]: DEBUG nova.scheduler.client.report [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Refreshing trait associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1894.280692] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769492, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.479345] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7da7ff01-f39d-4866-b54c-cfebcb2d1c05 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.488735] env[62510]: DEBUG oslo_concurrency.lockutils [req-7b71ba22-7593-49da-b6d0-9d8877637a34 req-2b9d8724-2173-4f8d-834c-1dd3fe04726a service nova] Releasing lock "refresh_cache-0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1894.489989] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c51e3b38-ad01-4bdb-b114-22b146063c5b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.524099] env[62510]: DEBUG nova.network.neutron [-] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1894.529935] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f706e9d4-7c63-49e5-8a44-60ea27099208 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.544598] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87b9f52d-3824-40f6-a958-5f9c40b1c3cb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.566401] env[62510]: DEBUG nova.compute.provider_tree [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1894.653711] env[62510]: DEBUG oslo_concurrency.lockutils [None req-303c9717-c050-45a8-94c4-b60329b99bd8 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquiring lock "5f229f78-6c5d-4170-bdd4-c5522b137949" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1894.656959] env[62510]: DEBUG oslo_concurrency.lockutils [None req-303c9717-c050-45a8-94c4-b60329b99bd8 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lock "5f229f78-6c5d-4170-bdd4-c5522b137949" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1894.656959] env[62510]: INFO nova.compute.manager [None req-303c9717-c050-45a8-94c4-b60329b99bd8 
tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Attaching volume 31fc22b2-cf39-495c-b65c-15cd495e88de to /dev/sdb [ 1894.693480] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-732e658d-d577-48a6-9039-6ceb35e66558 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.701404] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeddd22a-fd7c-4fd6-b832-839db0be1366 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.716698] env[62510]: DEBUG nova.virt.block_device [None req-303c9717-c050-45a8-94c4-b60329b99bd8 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Updating existing volume attachment record: 01ffe089-e707-4376-a1c4-d75453b28a6d {{(pid=62510) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1894.727165] env[62510]: DEBUG nova.network.neutron [-] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1894.780960] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769492, 'name': CreateVM_Task, 'duration_secs': 0.84366} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1894.781155] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1894.781870] env[62510]: DEBUG oslo_concurrency.lockutils [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1894.782046] env[62510]: DEBUG oslo_concurrency.lockutils [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1894.782371] env[62510]: DEBUG oslo_concurrency.lockutils [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1894.782618] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c8eaa4e-b150-416c-a5fc-a6c0a1ac77e8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.787271] env[62510]: DEBUG oslo_vmware.api [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 
tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 1894.787271] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]523f98a6-5bd3-df60-1f3d-2c8c8a26a747" [ 1894.787271] env[62510]: _type = "Task" [ 1894.787271] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1894.795214] env[62510]: DEBUG oslo_vmware.api [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]523f98a6-5bd3-df60-1f3d-2c8c8a26a747, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.028213] env[62510]: INFO nova.compute.manager [-] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Took 1.87 seconds to deallocate network for instance. [ 1895.033194] env[62510]: DEBUG nova.objects.instance [None req-2130d4c3-f107-4dec-8a88-50d0074e1a42 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lazy-loading 'flavor' on Instance uuid 841460b0-d917-44ea-88c6-0e5a3022f658 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1895.103132] env[62510]: DEBUG nova.scheduler.client.report [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Updated inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with generation 144 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1895.103714] env[62510]: DEBUG nova.compute.provider_tree [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Updating resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 generation from 144 to 145 during operation: update_inventory {{(pid=62510) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1895.103714] env[62510]: DEBUG nova.compute.provider_tree [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1895.160461] env[62510]: DEBUG nova.network.neutron [-] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 
1895.225341] env[62510]: DEBUG nova.compute.manager [req-8f5468c3-204a-404a-afd6-a393224b2dbb req-06da9635-adf1-4293-89c3-ee26a3045243 service nova] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Received event network-vif-deleted-53f991bc-ec2f-434b-8943-f8e6d891b608 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1895.231259] env[62510]: INFO nova.compute.manager [-] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Took 1.46 seconds to deallocate network for instance. [ 1895.247095] env[62510]: DEBUG nova.compute.manager [req-3e7bd61f-d3a0-48bb-8fe7-6a5ccc2d8f1c req-699a6a85-bc11-4e49-b029-02c2d4aa60c4 service nova] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Received event network-changed-348ebdec-3667-4eea-b76e-5356163db2f9 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1895.248028] env[62510]: DEBUG nova.compute.manager [req-3e7bd61f-d3a0-48bb-8fe7-6a5ccc2d8f1c req-699a6a85-bc11-4e49-b029-02c2d4aa60c4 service nova] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Refreshing instance network info cache due to event network-changed-348ebdec-3667-4eea-b76e-5356163db2f9. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1895.248028] env[62510]: DEBUG oslo_concurrency.lockutils [req-3e7bd61f-d3a0-48bb-8fe7-6a5ccc2d8f1c req-699a6a85-bc11-4e49-b029-02c2d4aa60c4 service nova] Acquiring lock "refresh_cache-72f8492b-304a-4451-ab40-4cdfe36b9e19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1895.248028] env[62510]: DEBUG oslo_concurrency.lockutils [req-3e7bd61f-d3a0-48bb-8fe7-6a5ccc2d8f1c req-699a6a85-bc11-4e49-b029-02c2d4aa60c4 service nova] Acquired lock "refresh_cache-72f8492b-304a-4451-ab40-4cdfe36b9e19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1895.248028] env[62510]: DEBUG nova.network.neutron [req-3e7bd61f-d3a0-48bb-8fe7-6a5ccc2d8f1c req-699a6a85-bc11-4e49-b029-02c2d4aa60c4 service nova] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Refreshing network info cache for port 348ebdec-3667-4eea-b76e-5356163db2f9 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1895.299097] env[62510]: DEBUG oslo_vmware.api [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]523f98a6-5bd3-df60-1f3d-2c8c8a26a747, 'name': SearchDatastore_Task, 'duration_secs': 0.010217} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1895.299464] env[62510]: DEBUG oslo_concurrency.lockutils [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1895.299736] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1895.300024] env[62510]: DEBUG oslo_concurrency.lockutils [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1895.300211] env[62510]: DEBUG oslo_concurrency.lockutils [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1895.300432] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1895.300724] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8a527fee-c5e6-4791-8166-d3454551c951 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.310794] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1895.310987] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1895.311709] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13fa7de6-ebee-4bc3-994d-6431c9691c10 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.318352] env[62510]: DEBUG oslo_vmware.api [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 1895.318352] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52f8ef63-b258-30bc-67c3-c4b51d0f4b5c" [ 1895.318352] env[62510]: _type = "Task" [ 1895.318352] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1895.326333] env[62510]: DEBUG oslo_vmware.api [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52f8ef63-b258-30bc-67c3-c4b51d0f4b5c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.541366] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5cfa5d37-e9cf-40ef-a6b5-2e6f13689f6a tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.542295] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2130d4c3-f107-4dec-8a88-50d0074e1a42 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "refresh_cache-841460b0-d917-44ea-88c6-0e5a3022f658" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1895.542466] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2130d4c3-f107-4dec-8a88-50d0074e1a42 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquired lock "refresh_cache-841460b0-d917-44ea-88c6-0e5a3022f658" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1895.542633] env[62510]: DEBUG nova.network.neutron [None req-2130d4c3-f107-4dec-8a88-50d0074e1a42 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1895.542810] env[62510]: DEBUG nova.objects.instance [None req-2130d4c3-f107-4dec-8a88-50d0074e1a42 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lazy-loading 'info_cache' on Instance uuid 841460b0-d917-44ea-88c6-0e5a3022f658 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1895.610723] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.997s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.614745] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.206s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.615080] env[62510]: DEBUG nova.objects.instance [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lazy-loading 'resources' on Instance uuid cf4160a8-1160-45fc-b9e5-e9526b6c1506 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1895.634394] env[62510]: INFO nova.scheduler.client.report [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Deleted allocations for instance 82dceacf-1898-4d86-b1c6-552a24ab565f [ 1895.665908] env[62510]: INFO nova.compute.manager [-] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Took 1.84 seconds to deallocate network for instance. [ 1895.736059] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ad82c335-a51c-423c-8374-d0b22ea046f3 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.831235] env[62510]: DEBUG oslo_vmware.api [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52f8ef63-b258-30bc-67c3-c4b51d0f4b5c, 'name': SearchDatastore_Task, 'duration_secs': 0.009077} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1895.832048] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e64836c-8a1c-4ede-9d35-90769d9a671c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.837920] env[62510]: DEBUG oslo_vmware.api [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 1895.837920] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52843263-dfde-2a3e-9682-0338b16c73bd" [ 1895.837920] env[62510]: _type = "Task" [ 1895.837920] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1895.849124] env[62510]: DEBUG oslo_vmware.api [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52843263-dfde-2a3e-9682-0338b16c73bd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.958762] env[62510]: DEBUG nova.network.neutron [req-3e7bd61f-d3a0-48bb-8fe7-6a5ccc2d8f1c req-699a6a85-bc11-4e49-b029-02c2d4aa60c4 service nova] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Updated VIF entry in instance network info cache for port 348ebdec-3667-4eea-b76e-5356163db2f9. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1895.959194] env[62510]: DEBUG nova.network.neutron [req-3e7bd61f-d3a0-48bb-8fe7-6a5ccc2d8f1c req-699a6a85-bc11-4e49-b029-02c2d4aa60c4 service nova] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Updating instance_info_cache with network_info: [{"id": "348ebdec-3667-4eea-b76e-5356163db2f9", "address": "fa:16:3e:eb:0c:99", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap348ebdec-36", "ovs_interfaceid": "348ebdec-3667-4eea-b76e-5356163db2f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1896.046445] env[62510]: DEBUG nova.objects.base [None req-2130d4c3-f107-4dec-8a88-50d0074e1a42 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Object Instance<841460b0-d917-44ea-88c6-0e5a3022f658> lazy-loaded attributes: flavor,info_cache {{(pid=62510) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1896.118774] env[62510]: DEBUG nova.objects.instance [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lazy-loading 'numa_topology' on Instance uuid cf4160a8-1160-45fc-b9e5-e9526b6c1506 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1896.141324] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c2cee24b-1857-4cfd-a2af-aa21c32bb3d8 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Lock "82dceacf-1898-4d86-b1c6-552a24ab565f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.988s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1896.172507] env[62510]: DEBUG oslo_concurrency.lockutils [None req-465a17e7-03d1-4b84-a175-c7fb688fd5ff tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1896.358046] env[62510]: DEBUG oslo_vmware.api [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52843263-dfde-2a3e-9682-0338b16c73bd, 'name': SearchDatastore_Task, 'duration_secs': 0.011851} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1896.358046] env[62510]: DEBUG oslo_concurrency.lockutils [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1896.358046] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 9956e5d2-edda-47af-a3df-743ebed1154b/9956e5d2-edda-47af-a3df-743ebed1154b.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1896.358046] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7ec3dd2a-c4c5-4980-99eb-c383221c4f81 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.364841] env[62510]: DEBUG oslo_vmware.api [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 1896.364841] env[62510]: value = "task-1769496" [ 1896.364841] env[62510]: _type = "Task" [ 1896.364841] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1896.375896] env[62510]: DEBUG oslo_vmware.api [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769496, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1896.462768] env[62510]: DEBUG oslo_concurrency.lockutils [req-3e7bd61f-d3a0-48bb-8fe7-6a5ccc2d8f1c req-699a6a85-bc11-4e49-b029-02c2d4aa60c4 service nova] Releasing lock "refresh_cache-72f8492b-304a-4451-ab40-4cdfe36b9e19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1896.462768] env[62510]: DEBUG nova.compute.manager [req-3e7bd61f-d3a0-48bb-8fe7-6a5ccc2d8f1c req-699a6a85-bc11-4e49-b029-02c2d4aa60c4 service nova] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Received event network-vif-deleted-5a0b680e-7c0d-4008-91b8-216bc3b9da1e {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1896.462768] env[62510]: DEBUG nova.compute.manager [req-3e7bd61f-d3a0-48bb-8fe7-6a5ccc2d8f1c req-699a6a85-bc11-4e49-b029-02c2d4aa60c4 service nova] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Received event network-vif-deleted-4568ba9b-dd3d-4796-bcfc-7bf80545a66b {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1896.623996] env[62510]: DEBUG nova.objects.base [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=62510) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1896.881654] env[62510]: DEBUG oslo_vmware.api [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769496, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1896.901603] env[62510]: DEBUG nova.network.neutron [None req-2130d4c3-f107-4dec-8a88-50d0074e1a42 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Updating instance_info_cache with network_info: [{"id": "5992dff8-0336-4d13-bbe8-2614b9dc96d5", "address": "fa:16:3e:47:48:b1", "network": {"id": "4c55d05c-607e-4972-898f-4aacefeddfdb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1391357384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bae4f0adee8c4c28add1849316448538", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dced2f3d-7fd3-4a42-836d-9f02dab4c949", "external-id": "nsx-vlan-transportzone-117", "segmentation_id": 117, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5992dff8-03", "ovs_interfaceid": "5992dff8-0336-4d13-bbe8-2614b9dc96d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1896.970651] env[62510]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb7f2086-a070-47d2-a932-431261e2a68c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.985817] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58180566-461c-42df-a8e3-e2088d47d934 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.032979] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eecb1d5-cdfe-435a-981c-ea0371fb8151 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.040019] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd60685f-37a0-4470-8ae3-045127f8dd40 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.058243] env[62510]: DEBUG nova.compute.provider_tree [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1897.093724] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquiring lock "e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1897.093962] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Lock "e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1897.098031] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1cb4b189-7a1e-4ed3-8ea4-399037f34a21 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquiring lock "6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1897.098253] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1cb4b189-7a1e-4ed3-8ea4-399037f34a21 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1897.121251] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquiring lock "cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1897.121496] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Lock "cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1897.275358] env[62510]: DEBUG nova.compute.manager [req-e6b23ca5-2657-4fd4-9a2e-4f9ac0f5b7ca req-172e8afb-dbee-44c1-98de-1537409645ac service nova] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Received event network-changed-348ebdec-3667-4eea-b76e-5356163db2f9 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1897.275358] env[62510]: DEBUG nova.compute.manager [req-e6b23ca5-2657-4fd4-9a2e-4f9ac0f5b7ca req-172e8afb-dbee-44c1-98de-1537409645ac service nova] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Refreshing instance network info cache due to event network-changed-348ebdec-3667-4eea-b76e-5356163db2f9. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1897.275358] env[62510]: DEBUG oslo_concurrency.lockutils [req-e6b23ca5-2657-4fd4-9a2e-4f9ac0f5b7ca req-172e8afb-dbee-44c1-98de-1537409645ac service nova] Acquiring lock "refresh_cache-72f8492b-304a-4451-ab40-4cdfe36b9e19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1897.275479] env[62510]: DEBUG oslo_concurrency.lockutils [req-e6b23ca5-2657-4fd4-9a2e-4f9ac0f5b7ca req-172e8afb-dbee-44c1-98de-1537409645ac service nova] Acquired lock "refresh_cache-72f8492b-304a-4451-ab40-4cdfe36b9e19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1897.275639] env[62510]: DEBUG nova.network.neutron [req-e6b23ca5-2657-4fd4-9a2e-4f9ac0f5b7ca req-172e8afb-dbee-44c1-98de-1537409645ac service nova] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Refreshing network info cache for port 348ebdec-3667-4eea-b76e-5356163db2f9 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1897.377096] env[62510]: DEBUG oslo_vmware.api [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769496, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.576148} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1897.377358] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 9956e5d2-edda-47af-a3df-743ebed1154b/9956e5d2-edda-47af-a3df-743ebed1154b.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1897.377574] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1897.377896] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-885580ce-2154-4489-9636-dddf831e1760 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.384662] env[62510]: DEBUG oslo_vmware.api [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 1897.384662] env[62510]: value = "task-1769498" [ 1897.384662] env[62510]: _type = "Task" [ 1897.384662] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1897.395113] env[62510]: DEBUG oslo_vmware.api [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769498, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.407925] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2130d4c3-f107-4dec-8a88-50d0074e1a42 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Releasing lock "refresh_cache-841460b0-d917-44ea-88c6-0e5a3022f658" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1897.562867] env[62510]: DEBUG nova.scheduler.client.report [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1897.596704] env[62510]: DEBUG nova.compute.manager [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1897.600509] env[62510]: DEBUG nova.compute.utils [None req-1cb4b189-7a1e-4ed3-8ea4-399037f34a21 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1897.623465] env[62510]: DEBUG nova.compute.manager [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1897.894865] env[62510]: DEBUG oslo_vmware.api [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769498, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074995} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1897.895227] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1897.896089] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c64a5e7-17f2-4a0a-9e28-c5b1130f908f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.921074] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] 9956e5d2-edda-47af-a3df-743ebed1154b/9956e5d2-edda-47af-a3df-743ebed1154b.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1897.925366] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b46fcaf8-b450-4329-8771-3b66dcca3ce8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.946827] env[62510]: DEBUG oslo_vmware.api [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 1897.946827] env[62510]: value = "task-1769499" [ 1897.946827] env[62510]: _type = "Task" [ 1897.946827] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1897.955841] env[62510]: DEBUG oslo_vmware.api [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769499, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.006795] env[62510]: DEBUG nova.network.neutron [req-e6b23ca5-2657-4fd4-9a2e-4f9ac0f5b7ca req-172e8afb-dbee-44c1-98de-1537409645ac service nova] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Updated VIF entry in instance network info cache for port 348ebdec-3667-4eea-b76e-5356163db2f9. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1898.007180] env[62510]: DEBUG nova.network.neutron [req-e6b23ca5-2657-4fd4-9a2e-4f9ac0f5b7ca req-172e8afb-dbee-44c1-98de-1537409645ac service nova] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Updating instance_info_cache with network_info: [{"id": "348ebdec-3667-4eea-b76e-5356163db2f9", "address": "fa:16:3e:eb:0c:99", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap348ebdec-36", "ovs_interfaceid": "348ebdec-3667-4eea-b76e-5356163db2f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1898.069780] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.455s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1898.073117] env[62510]: DEBUG oslo_concurrency.lockutils [None req-08008098-e99e-48d0-b37a-a5e1d914f75b tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.283s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1898.073117] env[62510]: DEBUG nova.objects.instance [None req-08008098-e99e-48d0-b37a-a5e1d914f75b tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lazy-loading 'resources' on Instance uuid 0c93a909-d08f-466c-bdef-a26fa35cd944 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1898.102513] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1cb4b189-7a1e-4ed3-8ea4-399037f34a21 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3" "released" by 
"nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.004s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1898.118171] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1898.141128] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1898.424059] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2130d4c3-f107-4dec-8a88-50d0074e1a42 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1898.424059] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3b62d77a-6208-4115-81ff-3fee465365cd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.431883] env[62510]: DEBUG oslo_vmware.api [None req-2130d4c3-f107-4dec-8a88-50d0074e1a42 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 1898.431883] env[62510]: value = "task-1769500" [ 1898.431883] env[62510]: _type = "Task" [ 1898.431883] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1898.440939] env[62510]: DEBUG oslo_vmware.api [None req-2130d4c3-f107-4dec-8a88-50d0074e1a42 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769500, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.455609] env[62510]: DEBUG oslo_vmware.api [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769499, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.510579] env[62510]: DEBUG oslo_concurrency.lockutils [req-e6b23ca5-2657-4fd4-9a2e-4f9ac0f5b7ca req-172e8afb-dbee-44c1-98de-1537409645ac service nova] Releasing lock "refresh_cache-72f8492b-304a-4451-ab40-4cdfe36b9e19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1898.510899] env[62510]: DEBUG nova.compute.manager [req-e6b23ca5-2657-4fd4-9a2e-4f9ac0f5b7ca req-172e8afb-dbee-44c1-98de-1537409645ac service nova] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Received event network-changed-f1d12594-5d5a-4965-a017-3b055a432283 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1898.511116] env[62510]: DEBUG nova.compute.manager [req-e6b23ca5-2657-4fd4-9a2e-4f9ac0f5b7ca req-172e8afb-dbee-44c1-98de-1537409645ac service nova] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Refreshing instance network info cache due to event network-changed-f1d12594-5d5a-4965-a017-3b055a432283. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1898.511371] env[62510]: DEBUG oslo_concurrency.lockutils [req-e6b23ca5-2657-4fd4-9a2e-4f9ac0f5b7ca req-172e8afb-dbee-44c1-98de-1537409645ac service nova] Acquiring lock "refresh_cache-0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1898.511534] env[62510]: DEBUG oslo_concurrency.lockutils [req-e6b23ca5-2657-4fd4-9a2e-4f9ac0f5b7ca req-172e8afb-dbee-44c1-98de-1537409645ac service nova] Acquired lock "refresh_cache-0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1898.511741] env[62510]: DEBUG nova.network.neutron [req-e6b23ca5-2657-4fd4-9a2e-4f9ac0f5b7ca req-172e8afb-dbee-44c1-98de-1537409645ac service nova] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Refreshing network info cache for port f1d12594-5d5a-4965-a017-3b055a432283 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1898.584660] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e366c716-e6e8-4ea3-9a96-cb623efbcb31 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lock "cf4160a8-1160-45fc-b9e5-e9526b6c1506" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 37.518s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1898.585722] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lock "cf4160a8-1160-45fc-b9e5-e9526b6c1506" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 8.363s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1898.586383] env[62510]: INFO nova.compute.manager [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Unshelving [ 1898.852308] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-41841aef-25d8-4d1c-bfe7-4ae697f75778 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.864876] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a2cd149-2792-4346-9292-05c31f88348a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.895597] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6df77d0f-dc7f-4dd8-8dd4-dc62876adf77 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.903851] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82ca692d-23c7-4e40-9270-75bf0cfebe23 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.917165] env[62510]: DEBUG nova.compute.provider_tree [None req-08008098-e99e-48d0-b37a-a5e1d914f75b tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1898.950844] env[62510]: DEBUG oslo_vmware.api [None req-2130d4c3-f107-4dec-8a88-50d0074e1a42 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769500, 'name': PowerOnVM_Task, 'duration_secs': 0.399267} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1898.954659] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2130d4c3-f107-4dec-8a88-50d0074e1a42 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1898.954948] env[62510]: DEBUG nova.compute.manager [None req-2130d4c3-f107-4dec-8a88-50d0074e1a42 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1898.955867] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bedb2a58-dbc7-4f25-8358-7e5d9a3eea78 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.964755] env[62510]: DEBUG oslo_vmware.api [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769499, 'name': ReconfigVM_Task, 'duration_secs': 0.723498} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1898.966501] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Reconfigured VM instance instance-0000006b to attach disk [datastore1] 9956e5d2-edda-47af-a3df-743ebed1154b/9956e5d2-edda-47af-a3df-743ebed1154b.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1898.970113] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6ccb1ced-5b94-43c4-9ac9-428ab817948f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.977845] env[62510]: DEBUG oslo_vmware.api [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 1898.977845] env[62510]: value = "task-1769501" [ 1898.977845] env[62510]: _type = "Task" [ 1898.977845] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1898.986343] env[62510]: DEBUG oslo_vmware.api [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769501, 'name': Rename_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.177259] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1cb4b189-7a1e-4ed3-8ea4-399037f34a21 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquiring lock "6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1899.177259] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1cb4b189-7a1e-4ed3-8ea4-399037f34a21 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1899.177259] env[62510]: INFO nova.compute.manager [None req-1cb4b189-7a1e-4ed3-8ea4-399037f34a21 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Attaching volume 1d070b5e-f3a1-4f0d-9e31-9e75d1f1ee0a to /dev/sdb [ 1899.210703] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25edf4a8-3918-4e11-80c4-54188886b349 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.222078] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ece7f7f-e4f4-49cb-9aec-d405735088e1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.244574] env[62510]: DEBUG nova.virt.block_device [None 
req-1cb4b189-7a1e-4ed3-8ea4-399037f34a21 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Updating existing volume attachment record: 110b2822-5238-470a-b4ee-f9817a19fedb {{(pid=62510) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1899.263448] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-303c9717-c050-45a8-94c4-b60329b99bd8 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Volume attach. Driver type: vmdk {{(pid=62510) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1899.263565] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-303c9717-c050-45a8-94c4-b60329b99bd8 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367477', 'volume_id': '31fc22b2-cf39-495c-b65c-15cd495e88de', 'name': 'volume-31fc22b2-cf39-495c-b65c-15cd495e88de', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '5f229f78-6c5d-4170-bdd4-c5522b137949', 'attached_at': '', 'detached_at': '', 'volume_id': '31fc22b2-cf39-495c-b65c-15cd495e88de', 'serial': '31fc22b2-cf39-495c-b65c-15cd495e88de'} {{(pid=62510) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1899.265364] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fe79b32-583c-4ed7-a9ed-fe3fd96688d6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.283476] env[62510]: DEBUG nova.network.neutron [req-e6b23ca5-2657-4fd4-9a2e-4f9ac0f5b7ca req-172e8afb-dbee-44c1-98de-1537409645ac service nova] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Updated VIF entry in instance network info cache for port f1d12594-5d5a-4965-a017-3b055a432283. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1899.283829] env[62510]: DEBUG nova.network.neutron [req-e6b23ca5-2657-4fd4-9a2e-4f9ac0f5b7ca req-172e8afb-dbee-44c1-98de-1537409645ac service nova] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Updating instance_info_cache with network_info: [{"id": "f1d12594-5d5a-4965-a017-3b055a432283", "address": "fa:16:3e:e6:3b:d1", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.135", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1d12594-5d", "ovs_interfaceid": "f1d12594-5d5a-4965-a017-3b055a432283", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1899.285582] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7039b0d7-5e04-4aa9-bb74-3d1f27083bf3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.312950] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-303c9717-c050-45a8-94c4-b60329b99bd8 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] volume-31fc22b2-cf39-495c-b65c-15cd495e88de/volume-31fc22b2-cf39-495c-b65c-15cd495e88de.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1899.313941] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ea440b17-b593-45cb-985d-aaace1089ed2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.339056] env[62510]: DEBUG oslo_vmware.api [None req-303c9717-c050-45a8-94c4-b60329b99bd8 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1899.339056] env[62510]: value = "task-1769502" [ 1899.339056] env[62510]: _type = "Task" [ 1899.339056] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1899.347327] env[62510]: DEBUG oslo_vmware.api [None req-303c9717-c050-45a8-94c4-b60329b99bd8 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769502, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.420139] env[62510]: DEBUG nova.scheduler.client.report [None req-08008098-e99e-48d0-b37a-a5e1d914f75b tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1899.489128] env[62510]: DEBUG oslo_vmware.api [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769501, 'name': Rename_Task, 'duration_secs': 0.151973} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1899.490071] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1899.490394] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fb1293a1-d507-43e0-833d-bfd750eacbf2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.496995] env[62510]: DEBUG oslo_vmware.api [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 1899.496995] env[62510]: value = "task-1769506" [ 1899.496995] env[62510]: _type = "Task" [ 1899.496995] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1899.510344] env[62510]: DEBUG oslo_vmware.api [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769506, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.617304] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1899.732365] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4b97c700-cf3a-43d9-b723-f73494283c75 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "841460b0-d917-44ea-88c6-0e5a3022f658" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1899.732766] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4b97c700-cf3a-43d9-b723-f73494283c75 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "841460b0-d917-44ea-88c6-0e5a3022f658" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1899.733118] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4b97c700-cf3a-43d9-b723-f73494283c75 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "841460b0-d917-44ea-88c6-0e5a3022f658-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1899.733436] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4b97c700-cf3a-43d9-b723-f73494283c75 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "841460b0-d917-44ea-88c6-0e5a3022f658-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1899.733709] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4b97c700-cf3a-43d9-b723-f73494283c75 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "841460b0-d917-44ea-88c6-0e5a3022f658-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1899.736231] env[62510]: INFO nova.compute.manager [None req-4b97c700-cf3a-43d9-b723-f73494283c75 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Terminating instance [ 1899.789436] env[62510]: DEBUG oslo_concurrency.lockutils [req-e6b23ca5-2657-4fd4-9a2e-4f9ac0f5b7ca req-172e8afb-dbee-44c1-98de-1537409645ac service nova] Releasing lock "refresh_cache-0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1899.850276] env[62510]: DEBUG oslo_vmware.api [None req-303c9717-c050-45a8-94c4-b60329b99bd8 
tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769502, 'name': ReconfigVM_Task, 'duration_secs': 0.429971} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1899.850541] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-303c9717-c050-45a8-94c4-b60329b99bd8 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Reconfigured VM instance instance-00000061 to attach disk [datastore1] volume-31fc22b2-cf39-495c-b65c-15cd495e88de/volume-31fc22b2-cf39-495c-b65c-15cd495e88de.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1899.855311] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bff47e9e-a3de-4dcb-b8cf-c6ca2104603c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.870266] env[62510]: DEBUG oslo_vmware.api [None req-303c9717-c050-45a8-94c4-b60329b99bd8 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1899.870266] env[62510]: value = "task-1769507" [ 1899.870266] env[62510]: _type = "Task" [ 1899.870266] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1899.878683] env[62510]: DEBUG oslo_vmware.api [None req-303c9717-c050-45a8-94c4-b60329b99bd8 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769507, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.929070] env[62510]: DEBUG oslo_concurrency.lockutils [None req-08008098-e99e-48d0-b37a-a5e1d914f75b tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.857s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1899.932035] env[62510]: DEBUG oslo_concurrency.lockutils [None req-746ef766-83c2-4cb1-b4a6-e64b71c42dc1 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.357s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1899.932035] env[62510]: DEBUG nova.objects.instance [None req-746ef766-83c2-4cb1-b4a6-e64b71c42dc1 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Lazy-loading 'resources' on Instance uuid 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1899.951016] env[62510]: INFO nova.scheduler.client.report [None req-08008098-e99e-48d0-b37a-a5e1d914f75b tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Deleted allocations for instance 0c93a909-d08f-466c-bdef-a26fa35cd944 [ 1900.008357] env[62510]: DEBUG oslo_vmware.api [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769506, 'name': PowerOnVM_Task, 'duration_secs': 0.481874} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1900.008735] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1900.008951] env[62510]: INFO nova.compute.manager [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Took 10.72 seconds to spawn the instance on the hypervisor. 
[ 1900.009185] env[62510]: DEBUG nova.compute.manager [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1900.009969] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e664439-ab1b-43ce-a9cc-4420a02dc140 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.228628] env[62510]: DEBUG oslo_concurrency.lockutils [None req-16e5a4e7-9543-46aa-af41-c6b754f05b32 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Acquiring lock "92cb4e54-a00e-4974-b134-22d302932e32" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1900.228916] env[62510]: DEBUG oslo_concurrency.lockutils [None req-16e5a4e7-9543-46aa-af41-c6b754f05b32 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lock "92cb4e54-a00e-4974-b134-22d302932e32" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1900.240178] env[62510]: DEBUG nova.compute.manager [None req-4b97c700-cf3a-43d9-b723-f73494283c75 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1900.240378] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4b97c700-cf3a-43d9-b723-f73494283c75 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1900.241745] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c6d01c0-cf3f-4b2c-bdd5-be2cf8951c7e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.249740] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b97c700-cf3a-43d9-b723-f73494283c75 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1900.249995] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9645e545-3a00-44a5-bbfc-9e72bf243963 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.257477] env[62510]: DEBUG oslo_vmware.api [None req-4b97c700-cf3a-43d9-b723-f73494283c75 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 1900.257477] env[62510]: value = "task-1769508" [ 1900.257477] env[62510]: _type = "Task" [ 1900.257477] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1900.266722] env[62510]: DEBUG oslo_vmware.api [None req-4b97c700-cf3a-43d9-b723-f73494283c75 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769508, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.382022] env[62510]: DEBUG oslo_vmware.api [None req-303c9717-c050-45a8-94c4-b60329b99bd8 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769507, 'name': ReconfigVM_Task, 'duration_secs': 0.487162} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1900.382022] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-303c9717-c050-45a8-94c4-b60329b99bd8 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367477', 'volume_id': '31fc22b2-cf39-495c-b65c-15cd495e88de', 'name': 'volume-31fc22b2-cf39-495c-b65c-15cd495e88de', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '5f229f78-6c5d-4170-bdd4-c5522b137949', 'attached_at': '', 'detached_at': '', 'volume_id': '31fc22b2-cf39-495c-b65c-15cd495e88de', 'serial': '31fc22b2-cf39-495c-b65c-15cd495e88de'} {{(pid=62510) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1900.462637] env[62510]: DEBUG oslo_concurrency.lockutils [None req-08008098-e99e-48d0-b37a-a5e1d914f75b tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "0c93a909-d08f-466c-bdef-a26fa35cd944" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.687s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1900.531670] env[62510]: INFO nova.compute.manager [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Took 16.87 seconds to build instance. 
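The `Invoking VirtualMachine.PowerOffVM_Task ...`, `Waiting for the task: (returnval){...}`, `progress is N%`, and `completed successfully` sequence above is oslo.vmware's invoke-and-poll pattern. A hedged sketch using only public oslo.vmware calls; the vCenter endpoint, credentials, and managed-object reference are placeholders, not values from this log:

```python
# Sketch of the invoke-and-poll pattern behind the PowerOffVM_Task lines.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    'vcenter.example.test',      # placeholder vCenter endpoint
    'user', 'secret',            # placeholder credentials
    api_retry_count=10,
    task_poll_interval=0.5)      # cadence behind the "progress is N%" lines

# *_Task methods return a Task managed-object reference immediately ...
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')  # hypothetical moref
task_ref = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)

# ... and wait_for_task() polls it (the _poll_task DEBUG lines) until it
# reaches 'success', or raises if the task ends in 'error'.
task_info = session.wait_for_task(task_ref)
print(task_info.state)
```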
[ 1900.710576] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c42f63e-5fe7-4858-8641-d0ccd9f7e36d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.725499] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1a2d5cf-b99f-475e-ba06-9586ad9c7e10 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.734173] env[62510]: DEBUG nova.compute.utils [None req-16e5a4e7-9543-46aa-af41-c6b754f05b32 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1900.767394] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b5d093d-faba-4cf5-8cd0-da24b5aba1d9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.777781] env[62510]: DEBUG oslo_vmware.api [None req-4b97c700-cf3a-43d9-b723-f73494283c75 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769508, 'name': PowerOffVM_Task, 'duration_secs': 0.200677} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1900.779131] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b97c700-cf3a-43d9-b723-f73494283c75 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1900.779421] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4b97c700-cf3a-43d9-b723-f73494283c75 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1900.779767] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0dbb6810-8324-4627-a82f-0662af84748f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.782443] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04b1b79a-bcd1-4bf1-959b-050a94e639c7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.797610] env[62510]: DEBUG nova.compute.provider_tree [None req-746ef766-83c2-4cb1-b4a6-e64b71c42dc1 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1900.861740] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4b97c700-cf3a-43d9-b723-f73494283c75 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1900.862040] 
env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4b97c700-cf3a-43d9-b723-f73494283c75 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1900.862234] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b97c700-cf3a-43d9-b723-f73494283c75 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Deleting the datastore file [datastore1] 841460b0-d917-44ea-88c6-0e5a3022f658 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1900.862638] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7b5eb20b-e163-4248-88e6-6bf08f539ea6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.870412] env[62510]: DEBUG oslo_vmware.api [None req-4b97c700-cf3a-43d9-b723-f73494283c75 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 1900.870412] env[62510]: value = "task-1769510" [ 1900.870412] env[62510]: _type = "Task" [ 1900.870412] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1900.879735] env[62510]: DEBUG oslo_vmware.api [None req-4b97c700-cf3a-43d9-b723-f73494283c75 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769510, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.033486] env[62510]: DEBUG oslo_concurrency.lockutils [None req-968749af-f51a-49df-95fa-59f8c105995d tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "9956e5d2-edda-47af-a3df-743ebed1154b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.388s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1901.265164] env[62510]: DEBUG oslo_concurrency.lockutils [None req-16e5a4e7-9543-46aa-af41-c6b754f05b32 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lock "92cb4e54-a00e-4974-b134-22d302932e32" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.036s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1901.300628] env[62510]: DEBUG nova.scheduler.client.report [None req-746ef766-83c2-4cb1-b4a6-e64b71c42dc1 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} 
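The `Inventory has not changed for provider ...` entry above embeds the provider's inventory as a dict. A short sketch that reads it as schedulable capacity per resource class, with the numbers copied from that entry; the formula `(total - reserved) * allocation_ratio` is the standard Placement capacity reading and is an assumption drawn from Placement's behaviour, not from this log:

```python
# Numbers copied from the report-client entry above.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165,
                'step_size': 1, 'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    # Capacity available to the scheduler for this resource class.
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc:<9} capacity={capacity:g} max_per_allocation={inv['max_unit']}")
# VCPU      capacity=192    max_per_allocation=16
# MEMORY_MB capacity=196078 max_per_allocation=65530
# DISK_GB   capacity=400    max_per_allocation=165
```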
[ 1901.381529] env[62510]: DEBUG oslo_vmware.api [None req-4b97c700-cf3a-43d9-b723-f73494283c75 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769510, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.419198] env[62510]: DEBUG nova.objects.instance [None req-303c9717-c050-45a8-94c4-b60329b99bd8 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lazy-loading 'flavor' on Instance uuid 5f229f78-6c5d-4170-bdd4-c5522b137949 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1901.667870] env[62510]: DEBUG oslo_concurrency.lockutils [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "14f08e1c-bf2a-4dca-9770-8ceb311130e3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1901.668185] env[62510]: DEBUG oslo_concurrency.lockutils [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "14f08e1c-bf2a-4dca-9770-8ceb311130e3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1901.806631] env[62510]: DEBUG oslo_concurrency.lockutils [None req-746ef766-83c2-4cb1-b4a6-e64b71c42dc1 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.875s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1901.809255] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5cfa5d37-e9cf-40ef-a6b5-2e6f13689f6a tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.268s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1901.809514] env[62510]: DEBUG nova.objects.instance [None req-5cfa5d37-e9cf-40ef-a6b5-2e6f13689f6a tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Lazy-loading 'resources' on Instance uuid 01204162-bf8e-46e0-bcf4-00df9ed7e7ce {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1901.845403] env[62510]: INFO nova.scheduler.client.report [None req-746ef766-83c2-4cb1-b4a6-e64b71c42dc1 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Deleted allocations for instance 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8 [ 1901.886028] env[62510]: DEBUG oslo_vmware.api [None req-4b97c700-cf3a-43d9-b723-f73494283c75 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769510, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.724041} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.886028] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b97c700-cf3a-43d9-b723-f73494283c75 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1901.886028] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4b97c700-cf3a-43d9-b723-f73494283c75 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1901.886028] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4b97c700-cf3a-43d9-b723-f73494283c75 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1901.886028] env[62510]: INFO nova.compute.manager [None req-4b97c700-cf3a-43d9-b723-f73494283c75 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Took 1.64 seconds to destroy the instance on the hypervisor. [ 1901.886028] env[62510]: DEBUG oslo.service.loopingcall [None req-4b97c700-cf3a-43d9-b723-f73494283c75 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1901.886028] env[62510]: DEBUG nova.compute.manager [-] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1901.886028] env[62510]: DEBUG nova.network.neutron [-] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1901.925141] env[62510]: DEBUG oslo_concurrency.lockutils [None req-303c9717-c050-45a8-94c4-b60329b99bd8 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lock "5f229f78-6c5d-4170-bdd4-c5522b137949" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.271s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1902.174345] env[62510]: DEBUG nova.compute.manager [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1902.209583] env[62510]: INFO nova.compute.manager [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Rescuing [ 1902.209938] env[62510]: DEBUG oslo_concurrency.lockutils [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquiring lock "refresh_cache-5f229f78-6c5d-4170-bdd4-c5522b137949" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1902.210122] env[62510]: DEBUG oslo_concurrency.lockutils [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquired lock "refresh_cache-5f229f78-6c5d-4170-bdd4-c5522b137949" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1902.210272] env[62510]: DEBUG nova.network.neutron [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1902.310194] env[62510]: DEBUG oslo_concurrency.lockutils [None req-16e5a4e7-9543-46aa-af41-c6b754f05b32 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Acquiring lock "92cb4e54-a00e-4974-b134-22d302932e32" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1902.310194] env[62510]: DEBUG oslo_concurrency.lockutils [None req-16e5a4e7-9543-46aa-af41-c6b754f05b32 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lock "92cb4e54-a00e-4974-b134-22d302932e32" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1902.310194] env[62510]: INFO nova.compute.manager [None req-16e5a4e7-9543-46aa-af41-c6b754f05b32 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Attaching volume da118d85-cc0e-4bcc-a73e-481ec7c76562 to /dev/sdb [ 1902.344360] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27defb84-5120-4252-9fbc-31a74d9bb210 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.352440] env[62510]: DEBUG oslo_concurrency.lockutils [None req-746ef766-83c2-4cb1-b4a6-e64b71c42dc1 tempest-ServerAddressesTestJSON-1715843609 tempest-ServerAddressesTestJSON-1715843609-project-member] Lock "5cae60b1-c0b1-4ff4-baf9-b8d1885614e8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.633s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1902.354691] env[62510]: DEBUG nova.compute.manager 
[req-aa9c1ef5-1169-41dc-9362-bb45df99df1d req-49c41ecd-f6cc-44e9-ad14-5e8ca41d9023 service nova] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Received event network-changed-aa1b717d-79b9-457c-829a-a4e12f0187c4 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1902.354897] env[62510]: DEBUG nova.compute.manager [req-aa9c1ef5-1169-41dc-9362-bb45df99df1d req-49c41ecd-f6cc-44e9-ad14-5e8ca41d9023 service nova] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Refreshing instance network info cache due to event network-changed-aa1b717d-79b9-457c-829a-a4e12f0187c4. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1902.355124] env[62510]: DEBUG oslo_concurrency.lockutils [req-aa9c1ef5-1169-41dc-9362-bb45df99df1d req-49c41ecd-f6cc-44e9-ad14-5e8ca41d9023 service nova] Acquiring lock "refresh_cache-9956e5d2-edda-47af-a3df-743ebed1154b" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1902.355480] env[62510]: DEBUG oslo_concurrency.lockutils [req-aa9c1ef5-1169-41dc-9362-bb45df99df1d req-49c41ecd-f6cc-44e9-ad14-5e8ca41d9023 service nova] Acquired lock "refresh_cache-9956e5d2-edda-47af-a3df-743ebed1154b" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1902.355480] env[62510]: DEBUG nova.network.neutron [req-aa9c1ef5-1169-41dc-9362-bb45df99df1d req-49c41ecd-f6cc-44e9-ad14-5e8ca41d9023 service nova] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Refreshing network info cache for port aa1b717d-79b9-457c-829a-a4e12f0187c4 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1902.369038] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28a2d77d-4e9b-439f-be23-caa667ec2dcc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.387014] env[62510]: DEBUG nova.virt.block_device [None req-16e5a4e7-9543-46aa-af41-c6b754f05b32 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Updating existing volume attachment record: aa8313ef-490c-4adf-9e43-9a0bee04403c {{(pid=62510) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1902.392913] env[62510]: DEBUG nova.compute.manager [req-c509175c-7388-401a-a36e-e3bb81fc1885 req-b706e7f4-72e6-4421-9bca-230d08dbe2aa service nova] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Received event network-vif-deleted-5992dff8-0336-4d13-bbe8-2614b9dc96d5 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1902.393142] env[62510]: INFO nova.compute.manager [req-c509175c-7388-401a-a36e-e3bb81fc1885 req-b706e7f4-72e6-4421-9bca-230d08dbe2aa service nova] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Neutron deleted interface 5992dff8-0336-4d13-bbe8-2614b9dc96d5; detaching it from the instance and deleting it from the info cache [ 1902.393285] env[62510]: DEBUG nova.network.neutron [req-c509175c-7388-401a-a36e-e3bb81fc1885 req-b706e7f4-72e6-4421-9bca-230d08dbe2aa service nova] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1902.605177] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-39365717-133f-428c-bead-5d3a0a5722cf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.617579] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74c79913-9093-49e1-a24d-4ad6bf6db3a6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.661023] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66d53720-39d6-4647-af5a-2392fddd585d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.667497] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d749e9d-d3e4-4b9b-a152-62cf8f23c114 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.684667] env[62510]: DEBUG nova.compute.provider_tree [None req-5cfa5d37-e9cf-40ef-a6b5-2e6f13689f6a tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1902.700699] env[62510]: DEBUG oslo_concurrency.lockutils [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1902.873035] env[62510]: DEBUG nova.network.neutron [-] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1902.898583] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2f03db1a-2850-43e6-bb86-e34796214139 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.912538] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcd01b39-6780-4b3e-ad05-b7d6b2d6b253 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.959021] env[62510]: DEBUG nova.compute.manager [req-c509175c-7388-401a-a36e-e3bb81fc1885 req-b706e7f4-72e6-4421-9bca-230d08dbe2aa service nova] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Detach interface failed, port_id=5992dff8-0336-4d13-bbe8-2614b9dc96d5, reason: Instance 841460b0-d917-44ea-88c6-0e5a3022f658 could not be found. 
{{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1903.082150] env[62510]: DEBUG nova.network.neutron [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Updating instance_info_cache with network_info: [{"id": "4d5c92f8-54e7-4731-bc8e-a3598f21a0b6", "address": "fa:16:3e:ff:14:88", "network": {"id": "3b8d6085-89b4-4ce1-b2d3-a23177f0eb79", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-951886226-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de0d125bba6242d3b9614402098efc1f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d5c92f8-54", "ovs_interfaceid": "4d5c92f8-54e7-4731-bc8e-a3598f21a0b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1903.187858] env[62510]: DEBUG nova.scheduler.client.report [None req-5cfa5d37-e9cf-40ef-a6b5-2e6f13689f6a tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1903.312138] env[62510]: DEBUG nova.network.neutron [req-aa9c1ef5-1169-41dc-9362-bb45df99df1d req-49c41ecd-f6cc-44e9-ad14-5e8ca41d9023 service nova] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Updated VIF entry in instance network info cache for port aa1b717d-79b9-457c-829a-a4e12f0187c4. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1903.313113] env[62510]: DEBUG nova.network.neutron [req-aa9c1ef5-1169-41dc-9362-bb45df99df1d req-49c41ecd-f6cc-44e9-ad14-5e8ca41d9023 service nova] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Updating instance_info_cache with network_info: [{"id": "aa1b717d-79b9-457c-829a-a4e12f0187c4", "address": "fa:16:3e:31:42:53", "network": {"id": "e49618de-aacc-4b42-8a2e-7e2dc945a3b1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-883053645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b13a257970e4a9a9f9cfecaaf37d9da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa1b717d-79", "ovs_interfaceid": "aa1b717d-79b9-457c-829a-a4e12f0187c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1903.375827] env[62510]: INFO nova.compute.manager [-] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Took 1.49 seconds to deallocate network for instance. 
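The `Updating instance_info_cache with network_info: [...]` entries above carry the full VIF structure for each port. A small illustrative helper (not a Nova or os-vif API) that walks such a structure and pulls out the MAC, fixed, and floating addresses; the data literal is trimmed from the cache update for port aa1b717d-79b9-457c-829a-a4e12f0187c4:

```python
# Data trimmed from the instance_info_cache entry above; summarize_vifs()
# is an illustrative helper, not part of Nova.
network_info = [{
    "id": "aa1b717d-79b9-457c-829a-a4e12f0187c4",
    "address": "fa:16:3e:31:42:53",
    "devname": "tapaa1b717d-79",
    "network": {
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{
                "address": "192.168.128.12",
                "type": "fixed",
                "floating_ips": [{"address": "10.180.180.237",
                                  "type": "floating"}],
            }],
        }],
    },
}]


def summarize_vifs(nw_info):
    """Yield (port_id, mac, fixed_ips, floating_ips) for each VIF dict."""
    for vif in nw_info:
        fixed, floating = [], []
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                fixed.append(ip["address"])
                floating.extend(f["address"]
                                for f in ip.get("floating_ips", []))
        yield vif["id"], vif["address"], fixed, floating


for port_id, mac, fixed, floating in summarize_vifs(network_info):
    print(port_id, mac, fixed, floating)
# aa1b717d-79b9-457c-829a-a4e12f0187c4 fa:16:3e:31:42:53
#   ['192.168.128.12'] ['10.180.180.237']
```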
[ 1903.583173] env[62510]: DEBUG oslo_concurrency.lockutils [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Releasing lock "refresh_cache-5f229f78-6c5d-4170-bdd4-c5522b137949" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1903.693763] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5cfa5d37-e9cf-40ef-a6b5-2e6f13689f6a tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.884s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1903.696267] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ad82c335-a51c-423c-8374-d0b22ea046f3 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.960s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1903.696571] env[62510]: DEBUG nova.objects.instance [None req-ad82c335-a51c-423c-8374-d0b22ea046f3 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lazy-loading 'resources' on Instance uuid c2be17de-175a-401f-8c53-f785aeecfff4 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1903.714245] env[62510]: INFO nova.scheduler.client.report [None req-5cfa5d37-e9cf-40ef-a6b5-2e6f13689f6a tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Deleted allocations for instance 01204162-bf8e-46e0-bcf4-00df9ed7e7ce [ 1903.792156] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-1cb4b189-7a1e-4ed3-8ea4-399037f34a21 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Volume attach. 
Driver type: vmdk {{(pid=62510) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1903.792834] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-1cb4b189-7a1e-4ed3-8ea4-399037f34a21 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367479', 'volume_id': '1d070b5e-f3a1-4f0d-9e31-9e75d1f1ee0a', 'name': 'volume-1d070b5e-f3a1-4f0d-9e31-9e75d1f1ee0a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3', 'attached_at': '', 'detached_at': '', 'volume_id': '1d070b5e-f3a1-4f0d-9e31-9e75d1f1ee0a', 'serial': '1d070b5e-f3a1-4f0d-9e31-9e75d1f1ee0a'} {{(pid=62510) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1903.795167] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dae3f6e-6294-4c05-860b-98e9acbec86c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.821324] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99ccf921-986f-45a6-98ce-dc76366c8f0e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.824238] env[62510]: DEBUG oslo_concurrency.lockutils [req-aa9c1ef5-1169-41dc-9362-bb45df99df1d req-49c41ecd-f6cc-44e9-ad14-5e8ca41d9023 service nova] Releasing lock "refresh_cache-9956e5d2-edda-47af-a3df-743ebed1154b" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1903.852194] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-1cb4b189-7a1e-4ed3-8ea4-399037f34a21 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] volume-1d070b5e-f3a1-4f0d-9e31-9e75d1f1ee0a/volume-1d070b5e-f3a1-4f0d-9e31-9e75d1f1ee0a.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1903.853168] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0d304477-9f49-4985-b43c-1da4ef277c3c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.873925] env[62510]: DEBUG oslo_vmware.api [None req-1cb4b189-7a1e-4ed3-8ea4-399037f34a21 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 1903.873925] env[62510]: value = "task-1769515" [ 1903.873925] env[62510]: _type = "Task" [ 1903.873925] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1903.883748] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4b97c700-cf3a-43d9-b723-f73494283c75 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1903.884148] env[62510]: DEBUG oslo_vmware.api [None req-1cb4b189-7a1e-4ed3-8ea4-399037f34a21 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769515, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.224125] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5cfa5d37-e9cf-40ef-a6b5-2e6f13689f6a tempest-AttachInterfacesUnderV243Test-1105012315 tempest-AttachInterfacesUnderV243Test-1105012315-project-member] Lock "01204162-bf8e-46e0-bcf4-00df9ed7e7ce" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.147s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1904.388392] env[62510]: DEBUG oslo_vmware.api [None req-1cb4b189-7a1e-4ed3-8ea4-399037f34a21 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769515, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.511765] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efc4d119-5011-45c8-aba6-702f4edd0056 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.523018] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b376b13-75a3-41ee-bdbb-c68eab2a436e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.563560] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97c3bcc5-e005-4115-8207-be174e2dadea {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.575786] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2b1f051-7cd3-4945-8499-985357d81fab {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.593271] env[62510]: DEBUG nova.compute.provider_tree [None req-ad82c335-a51c-423c-8374-d0b22ea046f3 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1904.885545] env[62510]: DEBUG oslo_vmware.api [None req-1cb4b189-7a1e-4ed3-8ea4-399037f34a21 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769515, 'name': ReconfigVM_Task, 'duration_secs': 0.67932} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1904.885650] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-1cb4b189-7a1e-4ed3-8ea4-399037f34a21 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Reconfigured VM instance instance-00000062 to attach disk [datastore1] volume-1d070b5e-f3a1-4f0d-9e31-9e75d1f1ee0a/volume-1d070b5e-f3a1-4f0d-9e31-9e75d1f1ee0a.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1904.890606] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-54533b6b-6e96-427d-9a47-6be755e6a6f1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.907217] env[62510]: DEBUG oslo_vmware.api [None req-1cb4b189-7a1e-4ed3-8ea4-399037f34a21 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 1904.907217] env[62510]: value = "task-1769517" [ 1904.907217] env[62510]: _type = "Task" [ 1904.907217] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1904.915825] env[62510]: DEBUG oslo_vmware.api [None req-1cb4b189-7a1e-4ed3-8ea4-399037f34a21 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769517, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.069768] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1ea85c8e-219a-4c35-9f54-056d59abe770 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "interface-0f6e9363-47ac-481e-bc1c-b8f4f9748d9c-98a986f8-1515-4f07-aee2-94ce84796db0" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1905.070058] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1ea85c8e-219a-4c35-9f54-056d59abe770 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "interface-0f6e9363-47ac-481e-bc1c-b8f4f9748d9c-98a986f8-1515-4f07-aee2-94ce84796db0" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1905.070575] env[62510]: DEBUG nova.objects.instance [None req-1ea85c8e-219a-4c35-9f54-056d59abe770 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lazy-loading 'flavor' on Instance uuid 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1905.098182] env[62510]: DEBUG nova.scheduler.client.report [None req-ad82c335-a51c-423c-8374-d0b22ea046f3 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 
512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1905.125560] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1905.125950] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ffe89cda-92d9-498c-b600-54d1c3ead717 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.134212] env[62510]: DEBUG oslo_vmware.api [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1905.134212] env[62510]: value = "task-1769518" [ 1905.134212] env[62510]: _type = "Task" [ 1905.134212] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1905.143027] env[62510]: DEBUG oslo_vmware.api [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769518, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.419079] env[62510]: DEBUG oslo_vmware.api [None req-1cb4b189-7a1e-4ed3-8ea4-399037f34a21 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769517, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.603508] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ad82c335-a51c-423c-8374-d0b22ea046f3 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.907s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1905.609164] env[62510]: DEBUG oslo_concurrency.lockutils [None req-465a17e7-03d1-4b84-a175-c7fb688fd5ff tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.437s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1905.610515] env[62510]: DEBUG nova.objects.instance [None req-465a17e7-03d1-4b84-a175-c7fb688fd5ff tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lazy-loading 'resources' on Instance uuid bc474f8b-dd3b-4d7a-a8e0-fea5570b3091 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1905.642633] env[62510]: INFO nova.scheduler.client.report [None req-ad82c335-a51c-423c-8374-d0b22ea046f3 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Deleted allocations for instance c2be17de-175a-401f-8c53-f785aeecfff4 [ 1905.652826] env[62510]: DEBUG oslo_vmware.api [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769518, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.740610] env[62510]: DEBUG nova.objects.instance [None req-1ea85c8e-219a-4c35-9f54-056d59abe770 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lazy-loading 'pci_requests' on Instance uuid 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1905.925189] env[62510]: DEBUG oslo_vmware.api [None req-1cb4b189-7a1e-4ed3-8ea4-399037f34a21 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769517, 'name': ReconfigVM_Task, 'duration_secs': 0.761358} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1905.925580] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-1cb4b189-7a1e-4ed3-8ea4-399037f34a21 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367479', 'volume_id': '1d070b5e-f3a1-4f0d-9e31-9e75d1f1ee0a', 'name': 'volume-1d070b5e-f3a1-4f0d-9e31-9e75d1f1ee0a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3', 'attached_at': '', 'detached_at': '', 'volume_id': '1d070b5e-f3a1-4f0d-9e31-9e75d1f1ee0a', 'serial': '1d070b5e-f3a1-4f0d-9e31-9e75d1f1ee0a'} {{(pid=62510) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1906.146453] env[62510]: DEBUG oslo_vmware.api [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769518, 'name': PowerOffVM_Task, 'duration_secs': 0.662589} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1906.149127] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1906.152113] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afc3dabc-6df4-4a51-be97-f0327855028c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.154967] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ad82c335-a51c-423c-8374-d0b22ea046f3 tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "c2be17de-175a-401f-8c53-f785aeecfff4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.591s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1906.179586] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b6d13aa-aa51-45ae-ad74-8de36965337d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.215567] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1906.216232] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6f12b88f-9fa6-4de9-a496-8a84cd04b232 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.225224] env[62510]: DEBUG oslo_vmware.api [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 
tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1906.225224] env[62510]: value = "task-1769519" [ 1906.225224] env[62510]: _type = "Task" [ 1906.225224] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1906.236902] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] VM already powered off {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1906.237132] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1906.237407] env[62510]: DEBUG oslo_concurrency.lockutils [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1906.238089] env[62510]: DEBUG oslo_concurrency.lockutils [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1906.238089] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1906.238089] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cb76fe8c-b0a4-4072-b9ca-5519bb3d4777 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.247963] env[62510]: DEBUG nova.objects.base [None req-1ea85c8e-219a-4c35-9f54-056d59abe770 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Object Instance<0f6e9363-47ac-481e-bc1c-b8f4f9748d9c> lazy-loaded attributes: flavor,pci_requests {{(pid=62510) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1906.248211] env[62510]: DEBUG nova.network.neutron [None req-1ea85c8e-219a-4c35-9f54-056d59abe770 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1906.252895] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 
tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1906.253095] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1906.257367] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b986c644-20a9-441a-9418-a77258fe0f00 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.266803] env[62510]: DEBUG oslo_vmware.api [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1906.266803] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5211b1ce-9658-1642-aa03-1fd423c35446" [ 1906.266803] env[62510]: _type = "Task" [ 1906.266803] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1906.275948] env[62510]: DEBUG oslo_vmware.api [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5211b1ce-9658-1642-aa03-1fd423c35446, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.342636] env[62510]: DEBUG nova.policy [None req-1ea85c8e-219a-4c35-9f54-056d59abe770 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '112bb5174a71476f9aaa66e917fc135a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cca414b18f8d431786c155d359f1325d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1906.419694] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ca4efd9-ed49-43b8-96d9-83198a90666a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.428388] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4486e082-afaf-48ac-a682-4782d4460061 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.464221] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7728dedb-5e9f-4bcc-b9ae-c06513d65c3e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.472740] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a00befc7-d162-45b9-9750-c7d881a78b1e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.489130] env[62510]: DEBUG nova.compute.provider_tree [None req-465a17e7-03d1-4b84-a175-c7fb688fd5ff tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1906.778978] env[62510]: DEBUG oslo_vmware.api [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5211b1ce-9658-1642-aa03-1fd423c35446, 'name': SearchDatastore_Task, 'duration_secs': 0.011348} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1906.779884] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d94923dd-8764-4f72-a208-2c449859b3ac {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.786541] env[62510]: DEBUG oslo_vmware.api [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1906.786541] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5297db89-e54e-3bd8-1cf3-0501749bc541" [ 1906.786541] env[62510]: _type = "Task" [ 1906.786541] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1906.794951] env[62510]: DEBUG oslo_vmware.api [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5297db89-e54e-3bd8-1cf3-0501749bc541, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.935579] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-16e5a4e7-9543-46aa-af41-c6b754f05b32 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Volume attach. 
Driver type: vmdk {{(pid=62510) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1906.936121] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-16e5a4e7-9543-46aa-af41-c6b754f05b32 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367481', 'volume_id': 'da118d85-cc0e-4bcc-a73e-481ec7c76562', 'name': 'volume-da118d85-cc0e-4bcc-a73e-481ec7c76562', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '92cb4e54-a00e-4974-b134-22d302932e32', 'attached_at': '', 'detached_at': '', 'volume_id': 'da118d85-cc0e-4bcc-a73e-481ec7c76562', 'serial': 'da118d85-cc0e-4bcc-a73e-481ec7c76562'} {{(pid=62510) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1906.937930] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8a766f2-5679-4bd8-9f81-4e405beaff50 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.954713] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8899c5df-2e05-40aa-b924-95b2a9a87642 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.981537] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-16e5a4e7-9543-46aa-af41-c6b754f05b32 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] volume-da118d85-cc0e-4bcc-a73e-481ec7c76562/volume-da118d85-cc0e-4bcc-a73e-481ec7c76562.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1906.982416] env[62510]: DEBUG nova.objects.instance [None req-1cb4b189-7a1e-4ed3-8ea4-399037f34a21 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lazy-loading 'flavor' on Instance uuid 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1906.983643] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-43e93de5-bc73-4e12-9831-9fdbc666e359 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.999565] env[62510]: DEBUG nova.scheduler.client.report [None req-465a17e7-03d1-4b84-a175-c7fb688fd5ff tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1907.003328] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1cb4b189-7a1e-4ed3-8ea4-399037f34a21 
tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.828s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1907.010756] env[62510]: DEBUG oslo_vmware.api [None req-16e5a4e7-9543-46aa-af41-c6b754f05b32 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Waiting for the task: (returnval){ [ 1907.010756] env[62510]: value = "task-1769520" [ 1907.010756] env[62510]: _type = "Task" [ 1907.010756] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1907.021148] env[62510]: DEBUG oslo_vmware.api [None req-16e5a4e7-9543-46aa-af41-c6b754f05b32 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769520, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.303154] env[62510]: DEBUG oslo_vmware.api [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5297db89-e54e-3bd8-1cf3-0501749bc541, 'name': SearchDatastore_Task, 'duration_secs': 0.036312} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1907.303154] env[62510]: DEBUG oslo_concurrency.lockutils [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1907.303154] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 5f229f78-6c5d-4170-bdd4-c5522b137949/645af513-c243-4722-b631-714f21477ae6-rescue.vmdk. {{(pid=62510) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1907.303154] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a4c6939c-e350-4838-addd-78a8ff8dc983 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.311594] env[62510]: DEBUG oslo_vmware.api [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1907.311594] env[62510]: value = "task-1769521" [ 1907.311594] env[62510]: _type = "Task" [ 1907.311594] env[62510]: } to complete. 
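The recurring "Waiting for the task … progress is N% … completed successfully" triplets in this log come from oslo.vmware polling the vCenter task object until it reaches a terminal state. The sketch below only illustrates that poll loop; it is not oslo.vmware's actual code, and `fetch_task_info` with its simulated task dict is a hypothetical stand-in for the real PropertyCollector read.

```python
import time

def fetch_task_info(task_ref):
    """Hypothetical stand-in for reading the vSphere Task object's state."""
    # The real driver issues a vSphere API call here; this fake just advances
    # the task by 25% per poll so the loop terminates.
    fetch_task_info.progress = min(getattr(fetch_task_info, "progress", -25) + 25, 100)
    return {
        "state": "success" if fetch_task_info.progress >= 100 else "running",
        "progress": fetch_task_info.progress,
    }

def wait_for_task(task_ref, poll_interval=0.2):
    """Poll a task until it finishes, logging progress like the entries above."""
    start = time.monotonic()
    while True:
        info = fetch_task_info(task_ref)
        if info["state"] == "success":
            duration = time.monotonic() - start
            print(f"Task: {task_ref} completed successfully "
                  f"(duration_secs: {duration:.6f})")
            return info
        if info["state"] == "error":
            raise RuntimeError(f"Task {task_ref} failed")
        print(f"Task: {task_ref} progress is {info['progress']}%.")
        time.sleep(poll_interval)

if __name__ == "__main__":
    wait_for_task("task-1769521")
```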
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1907.320887] env[62510]: DEBUG oslo_vmware.api [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769521, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.400696] env[62510]: DEBUG oslo_concurrency.lockutils [None req-52f8b75d-5f36-4915-a771-446cc52c43fd tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "8ffa27e9-6a3b-48d1-aed4-c808089788d9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1907.401082] env[62510]: DEBUG oslo_concurrency.lockutils [None req-52f8b75d-5f36-4915-a771-446cc52c43fd tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "8ffa27e9-6a3b-48d1-aed4-c808089788d9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1907.401362] env[62510]: DEBUG oslo_concurrency.lockutils [None req-52f8b75d-5f36-4915-a771-446cc52c43fd tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "8ffa27e9-6a3b-48d1-aed4-c808089788d9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1907.401583] env[62510]: DEBUG oslo_concurrency.lockutils [None req-52f8b75d-5f36-4915-a771-446cc52c43fd tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "8ffa27e9-6a3b-48d1-aed4-c808089788d9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1907.401852] env[62510]: DEBUG oslo_concurrency.lockutils [None req-52f8b75d-5f36-4915-a771-446cc52c43fd tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "8ffa27e9-6a3b-48d1-aed4-c808089788d9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1907.404475] env[62510]: INFO nova.compute.manager [None req-52f8b75d-5f36-4915-a771-446cc52c43fd tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Terminating instance [ 1907.504343] env[62510]: DEBUG oslo_concurrency.lockutils [None req-465a17e7-03d1-4b84-a175-c7fb688fd5ff tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.895s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1907.508386] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 
tempest-MultipleCreateTestJSON-462701472-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.390s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1907.510462] env[62510]: INFO nova.compute.claims [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1907.534028] env[62510]: DEBUG oslo_vmware.api [None req-16e5a4e7-9543-46aa-af41-c6b754f05b32 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769520, 'name': ReconfigVM_Task, 'duration_secs': 0.455058} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1907.538841] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-16e5a4e7-9543-46aa-af41-c6b754f05b32 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Reconfigured VM instance instance-00000063 to attach disk [datastore1] volume-da118d85-cc0e-4bcc-a73e-481ec7c76562/volume-da118d85-cc0e-4bcc-a73e-481ec7c76562.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1907.544465] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e8736572-02b9-4bff-b2f8-d1411c67f670 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.558711] env[62510]: INFO nova.scheduler.client.report [None req-465a17e7-03d1-4b84-a175-c7fb688fd5ff tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Deleted allocations for instance bc474f8b-dd3b-4d7a-a8e0-fea5570b3091 [ 1907.576289] env[62510]: DEBUG oslo_vmware.api [None req-16e5a4e7-9543-46aa-af41-c6b754f05b32 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Waiting for the task: (returnval){ [ 1907.576289] env[62510]: value = "task-1769522" [ 1907.576289] env[62510]: _type = "Task" [ 1907.576289] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1907.599876] env[62510]: DEBUG oslo_vmware.api [None req-16e5a4e7-9543-46aa-af41-c6b754f05b32 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769522, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.827609] env[62510]: DEBUG oslo_vmware.api [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769521, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.909053] env[62510]: DEBUG nova.compute.manager [None req-52f8b75d-5f36-4915-a771-446cc52c43fd tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Start destroying the instance on the hypervisor. 
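The Acquiring/acquired/released lines around terminate_instance and the resource tracker come from oslo.concurrency's lockutils, which records how long each caller waited for and then held a named lock. Below is a minimal stdlib approximation of that instrumentation; `named_lock` is a hypothetical helper for illustration only, not the lockutils API (the real implementation lives at the lockutils.py paths cited in the log).

```python
import contextlib
import threading
import time
from collections import defaultdict

# One process-local lock object per name, loosely mirroring lockutils' registry.
_locks = defaultdict(threading.Lock)

@contextlib.contextmanager
def named_lock(name, owner):
    """Acquire a lock by name and log waited/held durations like the entries above."""
    lock = _locks[name]
    print(f'Acquiring lock "{name}" by "{owner}"')
    t0 = time.monotonic()
    with lock:
        waited = time.monotonic() - t0
        print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
        t1 = time.monotonic()
        try:
            yield
        finally:
            held = time.monotonic() - t1
            print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')

if __name__ == "__main__":
    with named_lock("8ffa27e9-6a3b-48d1-aed4-c808089788d9", "do_terminate_instance"):
        time.sleep(0.1)  # stand-in for the destroy work done while holding the lock
```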
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1907.909402] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-52f8b75d-5f36-4915-a771-446cc52c43fd tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1907.910275] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f1b6bf5-1014-40b8-96f5-ae7ddc3a01d3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.918857] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-52f8b75d-5f36-4915-a771-446cc52c43fd tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1907.919158] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9115de30-04ed-4496-9ead-ea5dbff27160 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.925200] env[62510]: DEBUG oslo_vmware.api [None req-52f8b75d-5f36-4915-a771-446cc52c43fd tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1907.925200] env[62510]: value = "task-1769523" [ 1907.925200] env[62510]: _type = "Task" [ 1907.925200] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1907.926082] env[62510]: DEBUG nova.network.neutron [None req-1ea85c8e-219a-4c35-9f54-056d59abe770 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Successfully updated port: 98a986f8-1515-4f07-aee2-94ce84796db0 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1907.939239] env[62510]: DEBUG oslo_vmware.api [None req-52f8b75d-5f36-4915-a771-446cc52c43fd tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769523, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.059980] env[62510]: DEBUG nova.compute.manager [req-229c5eec-5ee1-488e-b415-563ea38af9e5 req-7f863757-3722-44b6-b8e9-0b067dbc848f service nova] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Received event network-vif-plugged-98a986f8-1515-4f07-aee2-94ce84796db0 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1908.060393] env[62510]: DEBUG oslo_concurrency.lockutils [req-229c5eec-5ee1-488e-b415-563ea38af9e5 req-7f863757-3722-44b6-b8e9-0b067dbc848f service nova] Acquiring lock "0f6e9363-47ac-481e-bc1c-b8f4f9748d9c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1908.060631] env[62510]: DEBUG oslo_concurrency.lockutils [req-229c5eec-5ee1-488e-b415-563ea38af9e5 req-7f863757-3722-44b6-b8e9-0b067dbc848f service nova] Lock "0f6e9363-47ac-481e-bc1c-b8f4f9748d9c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1908.060808] env[62510]: DEBUG oslo_concurrency.lockutils [req-229c5eec-5ee1-488e-b415-563ea38af9e5 req-7f863757-3722-44b6-b8e9-0b067dbc848f service nova] Lock "0f6e9363-47ac-481e-bc1c-b8f4f9748d9c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1908.063319] env[62510]: DEBUG nova.compute.manager [req-229c5eec-5ee1-488e-b415-563ea38af9e5 req-7f863757-3722-44b6-b8e9-0b067dbc848f service nova] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] No waiting events found dispatching network-vif-plugged-98a986f8-1515-4f07-aee2-94ce84796db0 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1908.063501] env[62510]: WARNING nova.compute.manager [req-229c5eec-5ee1-488e-b415-563ea38af9e5 req-7f863757-3722-44b6-b8e9-0b067dbc848f service nova] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Received unexpected event network-vif-plugged-98a986f8-1515-4f07-aee2-94ce84796db0 for instance with vm_state active and task_state None. [ 1908.072609] env[62510]: DEBUG oslo_concurrency.lockutils [None req-465a17e7-03d1-4b84-a175-c7fb688fd5ff tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "bc474f8b-dd3b-4d7a-a8e0-fea5570b3091" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.947s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1908.090158] env[62510]: DEBUG oslo_vmware.api [None req-16e5a4e7-9543-46aa-af41-c6b754f05b32 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769522, 'name': ReconfigVM_Task, 'duration_secs': 0.346454} completed successfully. 
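The "No waiting events found dispatching network-vif-plugged-…" and "Received unexpected event" messages reflect nova-compute's per-instance event registry: Neutron sent a vif-plugged notification, but nothing had registered to wait for it on this already-active instance, so the event was popped and only logged. The toy class below illustrates that registry pattern; `InstanceEvents` here is a simplified stand-in, not Nova's implementation.

```python
import threading

class InstanceEvents:
    """Toy per-instance event registry, loosely modelled on the log messages above."""

    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

    def prepare_for(self, instance_uuid, event_name):
        """Register interest in an event before triggering the external action."""
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = ev
        return ev

    def dispatch(self, instance_uuid, event_name):
        """Deliver an external event; warn when nobody was waiting for it."""
        with self._lock:
            ev = self._waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            print(f"No waiting events found dispatching {event_name}; "
                  f"received unexpected event for instance {instance_uuid}")
            return False
        ev.set()
        return True

if __name__ == "__main__":
    events = InstanceEvents()
    # No waiter was registered, so this mirrors the WARNING seen in the log.
    events.dispatch("0f6e9363-47ac-481e-bc1c-b8f4f9748d9c",
                    "network-vif-plugged-98a986f8-1515-4f07-aee2-94ce84796db0")
```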
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1908.090158] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-16e5a4e7-9543-46aa-af41-c6b754f05b32 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367481', 'volume_id': 'da118d85-cc0e-4bcc-a73e-481ec7c76562', 'name': 'volume-da118d85-cc0e-4bcc-a73e-481ec7c76562', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '92cb4e54-a00e-4974-b134-22d302932e32', 'attached_at': '', 'detached_at': '', 'volume_id': 'da118d85-cc0e-4bcc-a73e-481ec7c76562', 'serial': 'da118d85-cc0e-4bcc-a73e-481ec7c76562'} {{(pid=62510) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1908.327031] env[62510]: DEBUG oslo_vmware.api [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769521, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.568381} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1908.327031] env[62510]: INFO nova.virt.vmwareapi.ds_util [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 5f229f78-6c5d-4170-bdd4-c5522b137949/645af513-c243-4722-b631-714f21477ae6-rescue.vmdk. [ 1908.327031] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38366ec6-31d0-40cf-ab0a-8653f216f3de {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.359855] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] 5f229f78-6c5d-4170-bdd4-c5522b137949/645af513-c243-4722-b631-714f21477ae6-rescue.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1908.360398] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db740321-ddef-4aba-95ea-f8f313669e04 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.381199] env[62510]: DEBUG oslo_vmware.api [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1908.381199] env[62510]: value = "task-1769524" [ 1908.381199] env[62510]: _type = "Task" [ 1908.381199] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1908.391163] env[62510]: DEBUG oslo_vmware.api [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769524, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.436037] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1ea85c8e-219a-4c35-9f54-056d59abe770 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "refresh_cache-0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1908.436310] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1ea85c8e-219a-4c35-9f54-056d59abe770 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquired lock "refresh_cache-0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1908.436541] env[62510]: DEBUG nova.network.neutron [None req-1ea85c8e-219a-4c35-9f54-056d59abe770 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1908.438477] env[62510]: DEBUG oslo_vmware.api [None req-52f8b75d-5f36-4915-a771-446cc52c43fd tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769523, 'name': PowerOffVM_Task, 'duration_secs': 0.422468} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1908.438477] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-52f8b75d-5f36-4915-a771-446cc52c43fd tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1908.438710] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-52f8b75d-5f36-4915-a771-446cc52c43fd tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1908.439130] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c8a2b944-71bb-4a7f-bac9-25b6c52ab2e7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.771602] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e59615e-c504-4e08-9ac3-c203a96093d5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.781808] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2183a071-2322-40e5-85e1-c8277c32d7c8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.818797] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5c5373c-ed65-43d8-a9e4-3eb664526c47 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.828445] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c5707af-0b0c-4186-8209-bbb9a3c20594 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.844954] env[62510]: DEBUG nova.compute.provider_tree [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1908.891890] env[62510]: DEBUG oslo_vmware.api [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769524, 'name': ReconfigVM_Task, 'duration_secs': 0.379833} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1908.892203] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Reconfigured VM instance instance-00000061 to attach disk [datastore1] 5f229f78-6c5d-4170-bdd4-c5522b137949/645af513-c243-4722-b631-714f21477ae6-rescue.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1908.893091] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-602e087a-3986-4020-8625-1950a47523d9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.922169] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ae88dd2d-4d41-4f6f-b149-0ecc668c79ab {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.942388] env[62510]: DEBUG oslo_vmware.api [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1908.942388] env[62510]: value = "task-1769526" [ 1908.942388] env[62510]: _type = "Task" [ 1908.942388] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1908.957737] env[62510]: DEBUG oslo_vmware.api [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769526, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.986197] env[62510]: WARNING nova.network.neutron [None req-1ea85c8e-219a-4c35-9f54-056d59abe770 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] 940be04f-b555-4383-aaf8-63734d94a773 already exists in list: networks containing: ['940be04f-b555-4383-aaf8-63734d94a773']. 
ignoring it [ 1909.148016] env[62510]: DEBUG nova.objects.instance [None req-16e5a4e7-9543-46aa-af41-c6b754f05b32 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lazy-loading 'flavor' on Instance uuid 92cb4e54-a00e-4974-b134-22d302932e32 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1909.222185] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-52f8b75d-5f36-4915-a771-446cc52c43fd tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1909.222185] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-52f8b75d-5f36-4915-a771-446cc52c43fd tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1909.222317] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-52f8b75d-5f36-4915-a771-446cc52c43fd tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Deleting the datastore file [datastore1] 8ffa27e9-6a3b-48d1-aed4-c808089788d9 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1909.222592] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ae2c8605-4512-49e8-b328-a2458959b8c3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.232465] env[62510]: DEBUG oslo_vmware.api [None req-52f8b75d-5f36-4915-a771-446cc52c43fd tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for the task: (returnval){ [ 1909.232465] env[62510]: value = "task-1769527" [ 1909.232465] env[62510]: _type = "Task" [ 1909.232465] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1909.245721] env[62510]: DEBUG oslo_vmware.api [None req-52f8b75d-5f36-4915-a771-446cc52c43fd tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769527, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1909.348266] env[62510]: DEBUG nova.scheduler.client.report [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1909.453708] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquiring lock "11490e72-b9a5-4e8e-86c4-300c594cd914" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1909.454207] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "11490e72-b9a5-4e8e-86c4-300c594cd914" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1909.463636] env[62510]: DEBUG oslo_vmware.api [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769526, 'name': ReconfigVM_Task} progress is 14%. 
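The inventory dictionary reported for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 determines how much capacity the scheduler may place on this node: usable capacity per resource class is (total - reserved) * allocation_ratio, and the "Inventory has not changed" message means the report client skipped the Placement update because the computed inventory matched what it last sent. The snippet below simply reproduces that arithmetic with the values from the log; it is an illustration, not the report client's code.

```python
# Inventory copied from the log line above.
INVENTORY = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

def schedulable_capacity(inventory):
    """Return how much of each resource class the scheduler may consume."""
    return {
        rc: (spec["total"] - spec["reserved"]) * spec["allocation_ratio"]
        for rc, spec in inventory.items()
    }

def inventory_changed(current, reported):
    """Mirror the 'Inventory has not changed' check: only update when different."""
    return current != reported

if __name__ == "__main__":
    print(schedulable_capacity(INVENTORY))
    # -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
    print(inventory_changed(INVENTORY, INVENTORY))  # False, so no PUT is issued
```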
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1909.480345] env[62510]: DEBUG nova.network.neutron [None req-1ea85c8e-219a-4c35-9f54-056d59abe770 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Updating instance_info_cache with network_info: [{"id": "f1d12594-5d5a-4965-a017-3b055a432283", "address": "fa:16:3e:e6:3b:d1", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.135", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1d12594-5d", "ovs_interfaceid": "f1d12594-5d5a-4965-a017-3b055a432283", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "98a986f8-1515-4f07-aee2-94ce84796db0", "address": "fa:16:3e:c5:ae:21", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98a986f8-15", "ovs_interfaceid": "98a986f8-1515-4f07-aee2-94ce84796db0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1909.654099] env[62510]: DEBUG oslo_concurrency.lockutils [None req-16e5a4e7-9543-46aa-af41-c6b754f05b32 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lock "92cb4e54-a00e-4974-b134-22d302932e32" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.347s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1909.743284] env[62510]: DEBUG oslo_vmware.api [None req-52f8b75d-5f36-4915-a771-446cc52c43fd tempest-ServersTestJSON-938961669 
tempest-ServersTestJSON-938961669-project-member] Task: {'id': task-1769527, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.374502} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1909.743558] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-52f8b75d-5f36-4915-a771-446cc52c43fd tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1909.743746] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-52f8b75d-5f36-4915-a771-446cc52c43fd tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1909.743940] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-52f8b75d-5f36-4915-a771-446cc52c43fd tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1909.745139] env[62510]: INFO nova.compute.manager [None req-52f8b75d-5f36-4915-a771-446cc52c43fd tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Took 1.83 seconds to destroy the instance on the hypervisor. [ 1909.745139] env[62510]: DEBUG oslo.service.loopingcall [None req-52f8b75d-5f36-4915-a771-446cc52c43fd tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1909.745139] env[62510]: DEBUG nova.compute.manager [-] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1909.745139] env[62510]: DEBUG nova.network.neutron [-] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1909.854520] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.346s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1909.855018] env[62510]: DEBUG nova.compute.manager [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1909.861123] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.720s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1909.862854] env[62510]: INFO nova.compute.claims [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1909.941592] env[62510]: DEBUG oslo_concurrency.lockutils [None req-82e1259b-8cb7-4d11-a6ca-69f0b134c2a0 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Acquiring lock "92cb4e54-a00e-4974-b134-22d302932e32" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1909.941592] env[62510]: DEBUG oslo_concurrency.lockutils [None req-82e1259b-8cb7-4d11-a6ca-69f0b134c2a0 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lock "92cb4e54-a00e-4974-b134-22d302932e32" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1909.941592] env[62510]: DEBUG nova.compute.manager [None req-82e1259b-8cb7-4d11-a6ca-69f0b134c2a0 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1909.941858] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b1be427-9b97-4a50-ba0f-ebe87ce118a8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.949583] env[62510]: DEBUG nova.compute.manager [None req-82e1259b-8cb7-4d11-a6ca-69f0b134c2a0 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62510) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1909.950306] env[62510]: DEBUG nova.objects.instance [None req-82e1259b-8cb7-4d11-a6ca-69f0b134c2a0 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lazy-loading 'flavor' on Instance uuid 92cb4e54-a00e-4974-b134-22d302932e32 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1909.957091] env[62510]: DEBUG nova.compute.manager [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1909.959417] env[62510]: DEBUG oslo_vmware.api [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769526, 'name': ReconfigVM_Task, 'duration_secs': 0.96825} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1909.959530] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1909.959916] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2f7ca39c-e4fa-4d10-8e97-09e814cc8972 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.967845] env[62510]: DEBUG oslo_vmware.api [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1909.967845] env[62510]: value = "task-1769528" [ 1909.967845] env[62510]: _type = "Task" [ 1909.967845] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1909.977201] env[62510]: DEBUG oslo_vmware.api [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769528, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1909.984755] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1ea85c8e-219a-4c35-9f54-056d59abe770 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Releasing lock "refresh_cache-0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1909.984755] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1ea85c8e-219a-4c35-9f54-056d59abe770 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1909.984755] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1ea85c8e-219a-4c35-9f54-056d59abe770 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquired lock "0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1909.986210] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62190743-b34d-4664-8ffc-6e65113320a8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.017744] env[62510]: DEBUG nova.virt.hardware [None req-1ea85c8e-219a-4c35-9f54-056d59abe770 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1910.018042] env[62510]: DEBUG nova.virt.hardware [None req-1ea85c8e-219a-4c35-9f54-056d59abe770 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1910.018283] env[62510]: DEBUG nova.virt.hardware [None req-1ea85c8e-219a-4c35-9f54-056d59abe770 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1910.018487] env[62510]: DEBUG nova.virt.hardware [None req-1ea85c8e-219a-4c35-9f54-056d59abe770 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1910.018635] env[62510]: DEBUG nova.virt.hardware [None req-1ea85c8e-219a-4c35-9f54-056d59abe770 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1910.018782] env[62510]: DEBUG nova.virt.hardware [None req-1ea85c8e-219a-4c35-9f54-056d59abe770 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1910.019007] env[62510]: DEBUG nova.virt.hardware [None req-1ea85c8e-219a-4c35-9f54-056d59abe770 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1910.019188] env[62510]: DEBUG nova.virt.hardware [None req-1ea85c8e-219a-4c35-9f54-056d59abe770 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1910.019356] env[62510]: DEBUG nova.virt.hardware [None req-1ea85c8e-219a-4c35-9f54-056d59abe770 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1910.019522] env[62510]: DEBUG nova.virt.hardware [None req-1ea85c8e-219a-4c35-9f54-056d59abe770 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1910.019697] env[62510]: DEBUG nova.virt.hardware [None req-1ea85c8e-219a-4c35-9f54-056d59abe770 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1910.026669] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1ea85c8e-219a-4c35-9f54-056d59abe770 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Reconfiguring VM to attach interface {{(pid=62510) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1910.028735] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ee00d77c-d2b5-4ad1-95aa-0e41d9bc1945 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.042969] env[62510]: DEBUG nova.network.neutron [-] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1910.050104] env[62510]: DEBUG oslo_vmware.api [None req-1ea85c8e-219a-4c35-9f54-056d59abe770 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1910.050104] env[62510]: value = "task-1769529" [ 1910.050104] env[62510]: _type = "Task" [ 1910.050104] env[62510]: } to complete. 
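The nova.virt.hardware lines above show the driver enumerating CPU topologies for the 1-vCPU m1.nano flavor: with no flavor or image limits, any (sockets, cores, threads) split whose product equals the vCPU count is a candidate, which for a single vCPU leaves only 1:1:1. The function below is a simplified sketch of that enumeration, not Nova's exact algorithm.

```python
from itertools import product

def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate (sockets, cores, threads) splits whose product equals vcpus."""
    topologies = []
    # Only divisors of vcpus can appear as a factor, so the search space is tiny.
    divisors = [d for d in range(1, vcpus + 1) if vcpus % d == 0]
    for sockets, cores in product(divisors, divisors):
        if vcpus % (sockets * cores):
            continue
        threads = vcpus // (sockets * cores)
        if sockets <= max_sockets and cores <= max_cores and threads <= max_threads:
            topologies.append((sockets, cores, threads))
    return topologies

if __name__ == "__main__":
    # For the 1-vCPU m1.nano flavor in the log there is exactly one option.
    print(possible_cpu_topologies(1))   # [(1, 1, 1)]
    print(possible_cpu_topologies(4))   # (1, 1, 4), (1, 2, 2), (2, 2, 1), ...
```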
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1910.059993] env[62510]: DEBUG oslo_vmware.api [None req-1ea85c8e-219a-4c35-9f54-056d59abe770 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769529, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1910.367976] env[62510]: DEBUG nova.compute.utils [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1910.372130] env[62510]: DEBUG nova.compute.manager [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1910.372130] env[62510]: DEBUG nova.network.neutron [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1910.414624] env[62510]: DEBUG nova.policy [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '91e21fc3222846949a70dc17fba01e00', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '99bae3a9008a46349842b33ce6e41b25', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1910.479298] env[62510]: DEBUG oslo_vmware.api [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769528, 'name': PowerOnVM_Task, 'duration_secs': 0.449789} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1910.480533] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1910.480722] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1910.483326] env[62510]: DEBUG nova.compute.manager [None req-66323921-27ca-4603-b35d-25af8b8f6321 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1910.484156] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f29ff18-fdb1-4f5d-95b1-5c3464de2913 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.546791] env[62510]: INFO nova.compute.manager [-] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Took 0.80 seconds to deallocate network for instance. [ 1910.563748] env[62510]: DEBUG oslo_vmware.api [None req-1ea85c8e-219a-4c35-9f54-056d59abe770 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769529, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1910.675442] env[62510]: DEBUG nova.compute.manager [req-87e01d36-f0e0-4a8e-ad8f-13a23abd86c4 req-ef247fe9-ffab-415e-8257-87514cfd82b3 service nova] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Received event network-changed-98a986f8-1515-4f07-aee2-94ce84796db0 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1910.675649] env[62510]: DEBUG nova.compute.manager [req-87e01d36-f0e0-4a8e-ad8f-13a23abd86c4 req-ef247fe9-ffab-415e-8257-87514cfd82b3 service nova] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Refreshing instance network info cache due to event network-changed-98a986f8-1515-4f07-aee2-94ce84796db0. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1910.675951] env[62510]: DEBUG oslo_concurrency.lockutils [req-87e01d36-f0e0-4a8e-ad8f-13a23abd86c4 req-ef247fe9-ffab-415e-8257-87514cfd82b3 service nova] Acquiring lock "refresh_cache-0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1910.676528] env[62510]: DEBUG oslo_concurrency.lockutils [req-87e01d36-f0e0-4a8e-ad8f-13a23abd86c4 req-ef247fe9-ffab-415e-8257-87514cfd82b3 service nova] Acquired lock "refresh_cache-0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1910.676868] env[62510]: DEBUG nova.network.neutron [req-87e01d36-f0e0-4a8e-ad8f-13a23abd86c4 req-ef247fe9-ffab-415e-8257-87514cfd82b3 service nova] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Refreshing network info cache for port 98a986f8-1515-4f07-aee2-94ce84796db0 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1910.874839] env[62510]: DEBUG nova.compute.manager [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1910.882482] env[62510]: DEBUG nova.network.neutron [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Successfully created port: 32ef9085-593e-45e8-b2f2-1200d914b69b {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1910.963034] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-82e1259b-8cb7-4d11-a6ca-69f0b134c2a0 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1910.963034] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a329bece-81f4-410d-9338-482b28ac1a74 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.968374] env[62510]: DEBUG oslo_vmware.api [None req-82e1259b-8cb7-4d11-a6ca-69f0b134c2a0 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Waiting for the task: (returnval){ [ 1910.968374] env[62510]: value = "task-1769530" [ 1910.968374] env[62510]: _type = "Task" [ 1910.968374] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1910.978226] env[62510]: DEBUG oslo_vmware.api [None req-82e1259b-8cb7-4d11-a6ca-69f0b134c2a0 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769530, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1911.062673] env[62510]: DEBUG oslo_concurrency.lockutils [None req-52f8b75d-5f36-4915-a771-446cc52c43fd tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1911.071457] env[62510]: DEBUG oslo_concurrency.lockutils [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Acquiring lock "a5a9c086-6ae2-4644-acfa-7c147593b8d2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1911.071688] env[62510]: DEBUG oslo_concurrency.lockutils [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Lock "a5a9c086-6ae2-4644-acfa-7c147593b8d2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1911.076849] env[62510]: DEBUG oslo_vmware.api [None req-1ea85c8e-219a-4c35-9f54-056d59abe770 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769529, 'name': ReconfigVM_Task, 'duration_secs': 0.705915} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1911.077696] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1ea85c8e-219a-4c35-9f54-056d59abe770 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Releasing lock "0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1911.078017] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1ea85c8e-219a-4c35-9f54-056d59abe770 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Reconfigured VM to attach interface {{(pid=62510) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1911.204026] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17f02fed-63d0-4d64-8f8d-d3954cb4586e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.212113] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e89d25c-4a45-45fb-9d05-a0837b85da99 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.244016] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8de90563-bb4c-40a7-a75b-fe704ae5e083 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.256939] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-cf37054c-de1d-41b8-b15c-a25b68403297 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.276532] env[62510]: DEBUG nova.compute.provider_tree [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1911.410961] env[62510]: DEBUG nova.network.neutron [req-87e01d36-f0e0-4a8e-ad8f-13a23abd86c4 req-ef247fe9-ffab-415e-8257-87514cfd82b3 service nova] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Updated VIF entry in instance network info cache for port 98a986f8-1515-4f07-aee2-94ce84796db0. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1911.411480] env[62510]: DEBUG nova.network.neutron [req-87e01d36-f0e0-4a8e-ad8f-13a23abd86c4 req-ef247fe9-ffab-415e-8257-87514cfd82b3 service nova] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Updating instance_info_cache with network_info: [{"id": "f1d12594-5d5a-4965-a017-3b055a432283", "address": "fa:16:3e:e6:3b:d1", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.135", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1d12594-5d", "ovs_interfaceid": "f1d12594-5d5a-4965-a017-3b055a432283", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "98a986f8-1515-4f07-aee2-94ce84796db0", "address": "fa:16:3e:c5:ae:21", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98a986f8-15", "ovs_interfaceid": "98a986f8-1515-4f07-aee2-94ce84796db0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1911.479798] env[62510]: DEBUG oslo_vmware.api [None req-82e1259b-8cb7-4d11-a6ca-69f0b134c2a0 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769530, 'name': PowerOffVM_Task, 'duration_secs': 0.471809} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1911.480077] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-82e1259b-8cb7-4d11-a6ca-69f0b134c2a0 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1911.480279] env[62510]: DEBUG nova.compute.manager [None req-82e1259b-8cb7-4d11-a6ca-69f0b134c2a0 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1911.481038] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c6ae550-59ed-492c-aa1b-f4e134bb6248 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.578339] env[62510]: DEBUG nova.compute.manager [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1911.585910] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1ea85c8e-219a-4c35-9f54-056d59abe770 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "interface-0f6e9363-47ac-481e-bc1c-b8f4f9748d9c-98a986f8-1515-4f07-aee2-94ce84796db0" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.516s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1911.781582] env[62510]: DEBUG nova.scheduler.client.report [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1911.888039] env[62510]: DEBUG nova.compute.manager [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1911.915415] env[62510]: DEBUG nova.virt.hardware [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1911.915702] env[62510]: DEBUG nova.virt.hardware [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1911.915859] env[62510]: DEBUG nova.virt.hardware [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1911.916058] env[62510]: DEBUG nova.virt.hardware [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1911.916208] env[62510]: DEBUG nova.virt.hardware [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1911.916354] env[62510]: DEBUG nova.virt.hardware [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1911.916567] env[62510]: DEBUG nova.virt.hardware [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1911.916721] env[62510]: DEBUG nova.virt.hardware [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1911.916886] env[62510]: DEBUG nova.virt.hardware [None 
req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1911.917060] env[62510]: DEBUG nova.virt.hardware [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1911.917235] env[62510]: DEBUG nova.virt.hardware [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1911.917750] env[62510]: DEBUG oslo_concurrency.lockutils [req-87e01d36-f0e0-4a8e-ad8f-13a23abd86c4 req-ef247fe9-ffab-415e-8257-87514cfd82b3 service nova] Releasing lock "refresh_cache-0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1911.918036] env[62510]: DEBUG nova.compute.manager [req-87e01d36-f0e0-4a8e-ad8f-13a23abd86c4 req-ef247fe9-ffab-415e-8257-87514cfd82b3 service nova] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Received event network-vif-deleted-f4736e99-c658-4d4e-ace8-a3b4552f43bf {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1911.918939] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2837673e-7f97-4afb-989e-71427f37682b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.926961] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bef6fc70-c29d-4bbb-846d-121a0d07c453 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.992437] env[62510]: DEBUG oslo_concurrency.lockutils [None req-82e1259b-8cb7-4d11-a6ca-69f0b134c2a0 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lock "92cb4e54-a00e-4974-b134-22d302932e32" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.052s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1912.102180] env[62510]: DEBUG oslo_concurrency.lockutils [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1912.228863] env[62510]: INFO nova.compute.manager [None req-9b7152c9-2cd2-47f0-b178-908ce04c6bbc tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Unrescuing [ 1912.230258] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9b7152c9-2cd2-47f0-b178-908ce04c6bbc tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] 
Acquiring lock "refresh_cache-5f229f78-6c5d-4170-bdd4-c5522b137949" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1912.230258] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9b7152c9-2cd2-47f0-b178-908ce04c6bbc tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquired lock "refresh_cache-5f229f78-6c5d-4170-bdd4-c5522b137949" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1912.230258] env[62510]: DEBUG nova.network.neutron [None req-9b7152c9-2cd2-47f0-b178-908ce04c6bbc tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1912.275459] env[62510]: DEBUG oslo_concurrency.lockutils [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "fe3b3380-69bb-4563-abf2-9f0db439d31a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1912.275772] env[62510]: DEBUG oslo_concurrency.lockutils [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "fe3b3380-69bb-4563-abf2-9f0db439d31a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1912.288390] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.427s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1912.288977] env[62510]: DEBUG nova.compute.manager [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1912.291747] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.675s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1912.291962] env[62510]: DEBUG nova.objects.instance [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lazy-loading 'pci_requests' on Instance uuid cf4160a8-1160-45fc-b9e5-e9526b6c1506 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1912.363702] env[62510]: DEBUG nova.compute.manager [req-a89e4dfd-ab76-4c43-87fe-e8f795382282 req-43721769-99bd-4294-b3d8-58b1da3add1f service nova] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Received event network-vif-plugged-32ef9085-593e-45e8-b2f2-1200d914b69b {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1912.363890] env[62510]: DEBUG oslo_concurrency.lockutils [req-a89e4dfd-ab76-4c43-87fe-e8f795382282 req-43721769-99bd-4294-b3d8-58b1da3add1f service nova] Acquiring lock "e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1912.364170] env[62510]: DEBUG oslo_concurrency.lockutils [req-a89e4dfd-ab76-4c43-87fe-e8f795382282 req-43721769-99bd-4294-b3d8-58b1da3add1f service nova] Lock "e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1912.364364] env[62510]: DEBUG oslo_concurrency.lockutils [req-a89e4dfd-ab76-4c43-87fe-e8f795382282 req-43721769-99bd-4294-b3d8-58b1da3add1f service nova] Lock "e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1912.364536] env[62510]: DEBUG nova.compute.manager [req-a89e4dfd-ab76-4c43-87fe-e8f795382282 req-43721769-99bd-4294-b3d8-58b1da3add1f service nova] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] No waiting events found dispatching network-vif-plugged-32ef9085-593e-45e8-b2f2-1200d914b69b {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1912.364705] env[62510]: WARNING nova.compute.manager [req-a89e4dfd-ab76-4c43-87fe-e8f795382282 req-43721769-99bd-4294-b3d8-58b1da3add1f service nova] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Received unexpected event network-vif-plugged-32ef9085-593e-45e8-b2f2-1200d914b69b for instance with vm_state building and task_state spawning. [ 1912.777936] env[62510]: DEBUG nova.compute.manager [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1912.795792] env[62510]: DEBUG nova.compute.utils [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1912.799302] env[62510]: DEBUG nova.compute.manager [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1912.799481] env[62510]: DEBUG nova.network.neutron [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1912.802432] env[62510]: DEBUG nova.objects.instance [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lazy-loading 'numa_topology' on Instance uuid cf4160a8-1160-45fc-b9e5-e9526b6c1506 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1912.841748] env[62510]: DEBUG nova.policy [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '91e21fc3222846949a70dc17fba01e00', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '99bae3a9008a46349842b33ce6e41b25', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1912.924179] env[62510]: DEBUG nova.network.neutron [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Successfully updated port: 32ef9085-593e-45e8-b2f2-1200d914b69b {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1912.954049] env[62510]: DEBUG nova.compute.manager [req-aa3f99b8-b800-4e5c-ae6c-94d1f760354f req-9b82d420-d150-4a27-9e36-930270cd01d7 service nova] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Received event network-changed-32ef9085-593e-45e8-b2f2-1200d914b69b {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1912.954256] env[62510]: DEBUG nova.compute.manager [req-aa3f99b8-b800-4e5c-ae6c-94d1f760354f req-9b82d420-d150-4a27-9e36-930270cd01d7 service nova] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Refreshing instance network info cache due to event network-changed-32ef9085-593e-45e8-b2f2-1200d914b69b. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1912.954475] env[62510]: DEBUG oslo_concurrency.lockutils [req-aa3f99b8-b800-4e5c-ae6c-94d1f760354f req-9b82d420-d150-4a27-9e36-930270cd01d7 service nova] Acquiring lock "refresh_cache-e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1912.954645] env[62510]: DEBUG oslo_concurrency.lockutils [req-aa3f99b8-b800-4e5c-ae6c-94d1f760354f req-9b82d420-d150-4a27-9e36-930270cd01d7 service nova] Acquired lock "refresh_cache-e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1912.954809] env[62510]: DEBUG nova.network.neutron [req-aa3f99b8-b800-4e5c-ae6c-94d1f760354f req-9b82d420-d150-4a27-9e36-930270cd01d7 service nova] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Refreshing network info cache for port 32ef9085-593e-45e8-b2f2-1200d914b69b {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1912.976915] env[62510]: DEBUG nova.network.neutron [None req-9b7152c9-2cd2-47f0-b178-908ce04c6bbc tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Updating instance_info_cache with network_info: [{"id": "4d5c92f8-54e7-4731-bc8e-a3598f21a0b6", "address": "fa:16:3e:ff:14:88", "network": {"id": "3b8d6085-89b4-4ce1-b2d3-a23177f0eb79", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-951886226-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de0d125bba6242d3b9614402098efc1f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d5c92f8-54", "ovs_interfaceid": "4d5c92f8-54e7-4731-bc8e-a3598f21a0b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1913.005012] env[62510]: DEBUG nova.objects.instance [None req-a2ca87b8-0442-49c8-b7e8-0bdc54ad9bc8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lazy-loading 'flavor' on Instance uuid 92cb4e54-a00e-4974-b134-22d302932e32 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1913.237052] env[62510]: DEBUG nova.network.neutron [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Successfully created port: b0bc3c6a-5184-45d0-ab4b-e1a7ae6762b8 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1913.252444] env[62510]: DEBUG 
oslo_concurrency.lockutils [None req-910259fa-3130-450b-9ba5-2c4ecd069649 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "interface-0f6e9363-47ac-481e-bc1c-b8f4f9748d9c-98a986f8-1515-4f07-aee2-94ce84796db0" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1913.252698] env[62510]: DEBUG oslo_concurrency.lockutils [None req-910259fa-3130-450b-9ba5-2c4ecd069649 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "interface-0f6e9363-47ac-481e-bc1c-b8f4f9748d9c-98a986f8-1515-4f07-aee2-94ce84796db0" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1913.301155] env[62510]: DEBUG oslo_concurrency.lockutils [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1913.304726] env[62510]: INFO nova.compute.claims [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1913.307457] env[62510]: DEBUG nova.compute.manager [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1913.428881] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquiring lock "refresh_cache-e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1913.481522] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9b7152c9-2cd2-47f0-b178-908ce04c6bbc tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Releasing lock "refresh_cache-5f229f78-6c5d-4170-bdd4-c5522b137949" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1913.481522] env[62510]: DEBUG nova.objects.instance [None req-9b7152c9-2cd2-47f0-b178-908ce04c6bbc tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lazy-loading 'flavor' on Instance uuid 5f229f78-6c5d-4170-bdd4-c5522b137949 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1913.486720] env[62510]: DEBUG nova.network.neutron [req-aa3f99b8-b800-4e5c-ae6c-94d1f760354f req-9b82d420-d150-4a27-9e36-930270cd01d7 service nova] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Instance cache missing network info. 
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1913.509693] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a2ca87b8-0442-49c8-b7e8-0bdc54ad9bc8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Acquiring lock "refresh_cache-92cb4e54-a00e-4974-b134-22d302932e32" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1913.509958] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a2ca87b8-0442-49c8-b7e8-0bdc54ad9bc8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Acquired lock "refresh_cache-92cb4e54-a00e-4974-b134-22d302932e32" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1913.510202] env[62510]: DEBUG nova.network.neutron [None req-a2ca87b8-0442-49c8-b7e8-0bdc54ad9bc8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1913.510438] env[62510]: DEBUG nova.objects.instance [None req-a2ca87b8-0442-49c8-b7e8-0bdc54ad9bc8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lazy-loading 'info_cache' on Instance uuid 92cb4e54-a00e-4974-b134-22d302932e32 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1913.558441] env[62510]: DEBUG nova.network.neutron [req-aa3f99b8-b800-4e5c-ae6c-94d1f760354f req-9b82d420-d150-4a27-9e36-930270cd01d7 service nova] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1913.755584] env[62510]: DEBUG oslo_concurrency.lockutils [None req-910259fa-3130-450b-9ba5-2c4ecd069649 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1913.755853] env[62510]: DEBUG oslo_concurrency.lockutils [None req-910259fa-3130-450b-9ba5-2c4ecd069649 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquired lock "0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1913.756771] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ad53a6e-4ba7-4ae7-a5df-03a6cfbed130 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.774921] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-720f8091-ed87-4f67-9c25-43a72338500e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.800369] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-910259fa-3130-450b-9ba5-2c4ecd069649 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Reconfiguring VM to detach interface {{(pid=62510) detach_interface 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1913.800474] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-952890c8-a7a0-4b0b-aaef-86bc7f3e1b86 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.827558] env[62510]: DEBUG oslo_vmware.api [None req-910259fa-3130-450b-9ba5-2c4ecd069649 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1913.827558] env[62510]: value = "task-1769531" [ 1913.827558] env[62510]: _type = "Task" [ 1913.827558] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1913.835813] env[62510]: DEBUG oslo_vmware.api [None req-910259fa-3130-450b-9ba5-2c4ecd069649 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769531, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1913.987904] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67c0572c-dfba-4980-877b-d7fa4c7f8513 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.015392] env[62510]: DEBUG nova.objects.base [None req-a2ca87b8-0442-49c8-b7e8-0bdc54ad9bc8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Object Instance<92cb4e54-a00e-4974-b134-22d302932e32> lazy-loaded attributes: flavor,info_cache {{(pid=62510) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1914.016759] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b7152c9-2cd2-47f0-b178-908ce04c6bbc tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1914.017447] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c4b93578-a13b-48f3-ba4d-72ae52b35468 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.025328] env[62510]: DEBUG oslo_vmware.api [None req-9b7152c9-2cd2-47f0-b178-908ce04c6bbc tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1914.025328] env[62510]: value = "task-1769532" [ 1914.025328] env[62510]: _type = "Task" [ 1914.025328] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1914.034967] env[62510]: DEBUG oslo_vmware.api [None req-9b7152c9-2cd2-47f0-b178-908ce04c6bbc tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769532, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1914.061278] env[62510]: DEBUG oslo_concurrency.lockutils [req-aa3f99b8-b800-4e5c-ae6c-94d1f760354f req-9b82d420-d150-4a27-9e36-930270cd01d7 service nova] Releasing lock "refresh_cache-e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1914.061655] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquired lock "refresh_cache-e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1914.061815] env[62510]: DEBUG nova.network.neutron [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1914.323608] env[62510]: DEBUG nova.compute.manager [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1914.337874] env[62510]: DEBUG oslo_vmware.api [None req-910259fa-3130-450b-9ba5-2c4ecd069649 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769531, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1914.350989] env[62510]: DEBUG nova.virt.hardware [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1914.351235] env[62510]: DEBUG nova.virt.hardware [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1914.351470] env[62510]: DEBUG nova.virt.hardware [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1914.351709] env[62510]: DEBUG nova.virt.hardware [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1914.351896] env[62510]: DEBUG nova.virt.hardware [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1914.352066] env[62510]: DEBUG nova.virt.hardware [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1914.352279] env[62510]: DEBUG nova.virt.hardware [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1914.352436] env[62510]: DEBUG nova.virt.hardware [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1914.352599] env[62510]: DEBUG 
nova.virt.hardware [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1914.352759] env[62510]: DEBUG nova.virt.hardware [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1914.352926] env[62510]: DEBUG nova.virt.hardware [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1914.353725] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b68b150-7916-43d1-a855-6647f814611a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.363557] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d46d5ebd-7647-4c02-aa12-dc542fb0a0e1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.538657] env[62510]: DEBUG oslo_vmware.api [None req-9b7152c9-2cd2-47f0-b178-908ce04c6bbc tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769532, 'name': PowerOffVM_Task, 'duration_secs': 0.225316} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1914.539795] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b7152c9-2cd2-47f0-b178-908ce04c6bbc tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1914.545040] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-9b7152c9-2cd2-47f0-b178-908ce04c6bbc tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Reconfiguring VM instance instance-00000061 to detach disk 2002 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1914.545868] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e42a5212-8543-4422-a5e9-28a1b9ac67ac {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.550017] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e63309a0-034f-4eaf-8a09-b912d3e1e37c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.568461] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad246e36-a959-4734-a9e6-7d4d99d171c1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.572402] env[62510]: DEBUG oslo_vmware.api [None req-9b7152c9-2cd2-47f0-b178-908ce04c6bbc tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1914.572402] env[62510]: value = "task-1769533" [ 1914.572402] env[62510]: _type = "Task" [ 1914.572402] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1914.601625] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a49fc308-1cd2-4634-9209-02d75c95b701 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.607222] env[62510]: DEBUG oslo_vmware.api [None req-9b7152c9-2cd2-47f0-b178-908ce04c6bbc tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769533, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1914.608443] env[62510]: DEBUG nova.network.neutron [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Instance cache missing network info. 
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1914.613220] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc7c3be6-e54d-4781-9cf4-36ccbb8be0f4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.628330] env[62510]: DEBUG nova.compute.provider_tree [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1914.728206] env[62510]: DEBUG nova.compute.manager [req-e61fc15e-b883-4a29-aa2a-9e7389ccbdc2 req-ee73680e-4914-4918-b5de-15b317167a73 service nova] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Received event network-vif-plugged-b0bc3c6a-5184-45d0-ab4b-e1a7ae6762b8 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1914.728526] env[62510]: DEBUG oslo_concurrency.lockutils [req-e61fc15e-b883-4a29-aa2a-9e7389ccbdc2 req-ee73680e-4914-4918-b5de-15b317167a73 service nova] Acquiring lock "cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1914.728611] env[62510]: DEBUG oslo_concurrency.lockutils [req-e61fc15e-b883-4a29-aa2a-9e7389ccbdc2 req-ee73680e-4914-4918-b5de-15b317167a73 service nova] Lock "cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1914.728782] env[62510]: DEBUG oslo_concurrency.lockutils [req-e61fc15e-b883-4a29-aa2a-9e7389ccbdc2 req-ee73680e-4914-4918-b5de-15b317167a73 service nova] Lock "cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1914.729155] env[62510]: DEBUG nova.compute.manager [req-e61fc15e-b883-4a29-aa2a-9e7389ccbdc2 req-ee73680e-4914-4918-b5de-15b317167a73 service nova] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] No waiting events found dispatching network-vif-plugged-b0bc3c6a-5184-45d0-ab4b-e1a7ae6762b8 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1914.729382] env[62510]: WARNING nova.compute.manager [req-e61fc15e-b883-4a29-aa2a-9e7389ccbdc2 req-ee73680e-4914-4918-b5de-15b317167a73 service nova] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Received unexpected event network-vif-plugged-b0bc3c6a-5184-45d0-ab4b-e1a7ae6762b8 for instance with vm_state building and task_state spawning. 
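The lock records above ("Acquiring lock", "acquired ... waited 0.000s", "released ... held 0.000s") are emitted by oslo_concurrency.lockutils, which serializes per-instance event handling here and the "compute_resources" resource-tracker claims seen later in this section. A minimal sketch of that locking pattern, assuming hypothetical helpers and lock names modeled on the ones in this log (it is not Nova's actual code):

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def instance_claim(tracker, request):
    # The decorator serializes callers on the named lock and logs how long
    # each caller waited for it and held it, as in the records above.
    return tracker.claim(request)  # hypothetical helper

def pop_instance_event(instance_uuid, pending_events):
    # The per-instance "<uuid>-events" lock seen above can also be taken
    # explicitly with the context-manager form.
    with lockutils.lock('%s-events' % instance_uuid):
        return pending_events.pop(instance_uuid, None)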
[ 1914.833126] env[62510]: DEBUG nova.network.neutron [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Updating instance_info_cache with network_info: [{"id": "32ef9085-593e-45e8-b2f2-1200d914b69b", "address": "fa:16:3e:62:d4:d0", "network": {"id": "dd80546c-dbc1-461a-8b4a-342b8a63957b", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-174038375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "99bae3a9008a46349842b33ce6e41b25", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32ef9085-59", "ovs_interfaceid": "32ef9085-593e-45e8-b2f2-1200d914b69b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1914.840320] env[62510]: DEBUG oslo_vmware.api [None req-910259fa-3130-450b-9ba5-2c4ecd069649 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769531, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1914.866577] env[62510]: DEBUG nova.network.neutron [None req-a2ca87b8-0442-49c8-b7e8-0bdc54ad9bc8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Updating instance_info_cache with network_info: [{"id": "68c246e2-7126-4f5b-bc52-3c63f14aacf5", "address": "fa:16:3e:05:6e:0f", "network": {"id": "2193bc16-0e54-4910-9194-2724652b0e5d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1870939634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "59300e0f20144d9f88b78f7c971e86c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbdab640-5fea-4254-8bd3-f855b7eaca0d", "external-id": "nsx-vlan-transportzone-615", "segmentation_id": 615, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68c246e2-71", "ovs_interfaceid": "68c246e2-7126-4f5b-bc52-3c63f14aacf5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1914.876456] env[62510]: DEBUG nova.network.neutron [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Successfully updated port: b0bc3c6a-5184-45d0-ab4b-e1a7ae6762b8 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1915.082911] env[62510]: DEBUG oslo_vmware.api [None req-9b7152c9-2cd2-47f0-b178-908ce04c6bbc tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769533, 'name': ReconfigVM_Task, 'duration_secs': 0.236579} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1915.083269] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-9b7152c9-2cd2-47f0-b178-908ce04c6bbc tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Reconfigured VM instance instance-00000061 to detach disk 2002 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1915.084036] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b7152c9-2cd2-47f0-b178-908ce04c6bbc tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1915.084036] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b03daf13-666b-46c9-b40d-0f0acc47ac27 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.090539] env[62510]: DEBUG oslo_vmware.api [None req-9b7152c9-2cd2-47f0-b178-908ce04c6bbc tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1915.090539] env[62510]: value = "task-1769534" [ 1915.090539] env[62510]: _type = "Task" [ 1915.090539] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1915.097988] env[62510]: DEBUG oslo_vmware.api [None req-9b7152c9-2cd2-47f0-b178-908ce04c6bbc tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769534, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.131428] env[62510]: DEBUG nova.scheduler.client.report [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1915.339169] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Releasing lock "refresh_cache-e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1915.339481] env[62510]: DEBUG nova.compute.manager [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Instance network_info: |[{"id": "32ef9085-593e-45e8-b2f2-1200d914b69b", "address": "fa:16:3e:62:d4:d0", "network": {"id": "dd80546c-dbc1-461a-8b4a-342b8a63957b", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-174038375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "99bae3a9008a46349842b33ce6e41b25", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32ef9085-59", "ovs_interfaceid": "32ef9085-593e-45e8-b2f2-1200d914b69b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1915.339772] env[62510]: DEBUG oslo_vmware.api [None req-910259fa-3130-450b-9ba5-2c4ecd069649 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769531, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.340139] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:62:d4:d0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2e614f8e-6b11-4b6b-a421-904bca6acd91', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '32ef9085-593e-45e8-b2f2-1200d914b69b', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1915.347641] env[62510]: DEBUG oslo.service.loopingcall [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1915.347875] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1915.348133] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b62a8841-0a0b-43bc-be04-559e013f7854 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.368271] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1915.368271] env[62510]: value = "task-1769535" [ 1915.368271] env[62510]: _type = "Task" [ 1915.368271] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1915.368851] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a2ca87b8-0442-49c8-b7e8-0bdc54ad9bc8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Releasing lock "refresh_cache-92cb4e54-a00e-4974-b134-22d302932e32" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1915.378159] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769535, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.379237] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquiring lock "refresh_cache-cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1915.379437] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquired lock "refresh_cache-cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1915.379510] env[62510]: DEBUG nova.network.neutron [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1915.603054] env[62510]: DEBUG oslo_vmware.api [None req-9b7152c9-2cd2-47f0-b178-908ce04c6bbc tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769534, 'name': PowerOnVM_Task, 'duration_secs': 0.390424} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1915.603054] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b7152c9-2cd2-47f0-b178-908ce04c6bbc tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1915.603054] env[62510]: DEBUG nova.compute.manager [None req-9b7152c9-2cd2-47f0-b178-908ce04c6bbc tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1915.603054] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f64d223-ffb0-4bb3-bbcf-0328ddf09be4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.636466] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.345s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1915.638716] env[62510]: DEBUG oslo_concurrency.lockutils [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.938s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1915.640727] env[62510]: INFO 
nova.compute.claims [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1915.690130] env[62510]: INFO nova.network.neutron [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Updating port 022a0379-8a0f-412f-a55a-f8fcaf1102f3 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1915.840580] env[62510]: DEBUG oslo_vmware.api [None req-910259fa-3130-450b-9ba5-2c4ecd069649 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769531, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.879110] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769535, 'name': CreateVM_Task, 'duration_secs': 0.408753} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1915.879516] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1915.880348] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1915.880512] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1915.880890] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1915.882835] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4220fb85-1334-4e3f-b986-d640a6f6f05e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.888415] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for the task: (returnval){ [ 1915.888415] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]521015c7-be0c-0890-1fbe-409d0bd5310e" [ 1915.888415] env[62510]: _type = "Task" [ 1915.888415] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1915.896780] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]521015c7-be0c-0890-1fbe-409d0bd5310e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.937307] env[62510]: DEBUG nova.network.neutron [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1916.082645] env[62510]: DEBUG nova.network.neutron [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Updating instance_info_cache with network_info: [{"id": "b0bc3c6a-5184-45d0-ab4b-e1a7ae6762b8", "address": "fa:16:3e:a4:9e:1a", "network": {"id": "dd80546c-dbc1-461a-8b4a-342b8a63957b", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-174038375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "99bae3a9008a46349842b33ce6e41b25", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0bc3c6a-51", "ovs_interfaceid": "b0bc3c6a-5184-45d0-ab4b-e1a7ae6762b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1916.115904] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1916.116201] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1916.340098] env[62510]: DEBUG oslo_vmware.api [None req-910259fa-3130-450b-9ba5-2c4ecd069649 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769531, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.380596] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2ca87b8-0442-49c8-b7e8-0bdc54ad9bc8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1916.380908] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c5548061-dc78-48b2-b5b6-6bf9e6556ec5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.389246] env[62510]: DEBUG oslo_vmware.api [None req-a2ca87b8-0442-49c8-b7e8-0bdc54ad9bc8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Waiting for the task: (returnval){ [ 1916.389246] env[62510]: value = "task-1769536" [ 1916.389246] env[62510]: _type = "Task" [ 1916.389246] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1916.401317] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]521015c7-be0c-0890-1fbe-409d0bd5310e, 'name': SearchDatastore_Task, 'duration_secs': 0.010082} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1916.404335] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1916.404582] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1916.404810] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1916.404957] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1916.405155] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] 
Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1916.405413] env[62510]: DEBUG oslo_vmware.api [None req-a2ca87b8-0442-49c8-b7e8-0bdc54ad9bc8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769536, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.405622] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e7855906-c9d9-4914-880c-28c4b161d6b3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.421997] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1916.422207] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1916.422992] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5d2316a-5960-4dee-a82a-bdb29e1cf3e9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.429061] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for the task: (returnval){ [ 1916.429061] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]529b3d51-2a1b-77b7-87fd-e28003c186a7" [ 1916.429061] env[62510]: _type = "Task" [ 1916.429061] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1916.437288] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]529b3d51-2a1b-77b7-87fd-e28003c186a7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.585178] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Releasing lock "refresh_cache-cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1916.585583] env[62510]: DEBUG nova.compute.manager [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Instance network_info: |[{"id": "b0bc3c6a-5184-45d0-ab4b-e1a7ae6762b8", "address": "fa:16:3e:a4:9e:1a", "network": {"id": "dd80546c-dbc1-461a-8b4a-342b8a63957b", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-174038375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "99bae3a9008a46349842b33ce6e41b25", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0bc3c6a-51", "ovs_interfaceid": "b0bc3c6a-5184-45d0-ab4b-e1a7ae6762b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1916.586091] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a4:9e:1a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2e614f8e-6b11-4b6b-a421-904bca6acd91', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b0bc3c6a-5184-45d0-ab4b-e1a7ae6762b8', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1916.594211] env[62510]: DEBUG oslo.service.loopingcall [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1916.594530] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1916.594863] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-39b65665-51db-4084-87c3-fa573a080c9b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.617876] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1916.617876] env[62510]: value = "task-1769537" [ 1916.617876] env[62510]: _type = "Task" [ 1916.617876] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1916.625484] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1916.625700] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Starting heal instance info cache {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 1916.625768] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Rebuilding the list of instances to heal {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10313}} [ 1916.630620] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769537, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.757411] env[62510]: DEBUG nova.compute.manager [req-5e378c7b-bef5-45f6-a30e-39322422831c req-48e40921-f589-4144-8141-f12f32b1a63b service nova] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Received event network-changed-b0bc3c6a-5184-45d0-ab4b-e1a7ae6762b8 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1916.757595] env[62510]: DEBUG nova.compute.manager [req-5e378c7b-bef5-45f6-a30e-39322422831c req-48e40921-f589-4144-8141-f12f32b1a63b service nova] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Refreshing instance network info cache due to event network-changed-b0bc3c6a-5184-45d0-ab4b-e1a7ae6762b8. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1916.757806] env[62510]: DEBUG oslo_concurrency.lockutils [req-5e378c7b-bef5-45f6-a30e-39322422831c req-48e40921-f589-4144-8141-f12f32b1a63b service nova] Acquiring lock "refresh_cache-cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1916.758046] env[62510]: DEBUG oslo_concurrency.lockutils [req-5e378c7b-bef5-45f6-a30e-39322422831c req-48e40921-f589-4144-8141-f12f32b1a63b service nova] Acquired lock "refresh_cache-cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1916.758222] env[62510]: DEBUG nova.network.neutron [req-5e378c7b-bef5-45f6-a30e-39322422831c req-48e40921-f589-4144-8141-f12f32b1a63b service nova] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Refreshing network info cache for port b0bc3c6a-5184-45d0-ab4b-e1a7ae6762b8 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1916.842095] env[62510]: DEBUG oslo_vmware.api [None req-910259fa-3130-450b-9ba5-2c4ecd069649 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769531, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.901599] env[62510]: DEBUG oslo_vmware.api [None req-a2ca87b8-0442-49c8-b7e8-0bdc54ad9bc8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769536, 'name': PowerOnVM_Task, 'duration_secs': 0.43431} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1916.903911] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2ca87b8-0442-49c8-b7e8-0bdc54ad9bc8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1916.904177] env[62510]: DEBUG nova.compute.manager [None req-a2ca87b8-0442-49c8-b7e8-0bdc54ad9bc8 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1916.905117] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25eb2f6e-0b71-4677-bf02-74508ce08453 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.915107] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c7d2d16-c9d1-4a4b-9a6e-3560eb996f9e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.924147] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-820f5cda-6d80-45e6-be0a-5e5e3e25880d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.962835] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f170388-2652-4ea7-811d-247e59b813f9 {{(pid=62510) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.969507] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]529b3d51-2a1b-77b7-87fd-e28003c186a7, 'name': SearchDatastore_Task, 'duration_secs': 0.008797} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1916.970700] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbc0661a-c5bb-4dbd-a46f-bfec1cc31485 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.976548] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0936bae6-bc06-4343-8da8-3073d012a799 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.981655] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for the task: (returnval){ [ 1916.981655] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52e95aee-6b3c-c0f0-4021-af6592000aad" [ 1916.981655] env[62510]: _type = "Task" [ 1916.981655] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1916.992705] env[62510]: DEBUG nova.compute.provider_tree [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1916.999596] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52e95aee-6b3c-c0f0-4021-af6592000aad, 'name': SearchDatastore_Task, 'duration_secs': 0.008312} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1916.999596] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1916.999596] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c/e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1916.999596] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bde7b7ee-5214-4fef-9c9a-6532df1214aa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.006372] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for the task: (returnval){ [ 1917.006372] env[62510]: value = "task-1769538" [ 1917.006372] env[62510]: _type = "Task" [ 1917.006372] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1917.016246] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769538, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.138145] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Skipping network cache update for instance because it is Building. {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10322}} [ 1917.138429] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Skipping network cache update for instance because it is Building. {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10322}} [ 1917.138638] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Skipping network cache update for instance because it is Building. {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10322}} [ 1917.141911] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769537, 'name': CreateVM_Task, 'duration_secs': 0.330376} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1917.142655] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1917.144073] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1917.144384] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1917.146869] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1917.146869] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d1d3fb6-18f0-4968-b934-d4f0282b291f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.152278] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for the task: (returnval){ [ 1917.152278] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52e21148-76b8-936f-526e-56e69e4ca6ae" [ 1917.152278] env[62510]: _type = "Task" [ 1917.152278] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1917.164165] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52e21148-76b8-936f-526e-56e69e4ca6ae, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.170644] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "refresh_cache-8ffa27e9-6a3b-48d1-aed4-c808089788d9" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1917.170788] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquired lock "refresh_cache-8ffa27e9-6a3b-48d1-aed4-c808089788d9" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1917.170942] env[62510]: DEBUG nova.network.neutron [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Forcefully refreshing network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1917.171151] env[62510]: DEBUG nova.objects.instance [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lazy-loading 'info_cache' on Instance uuid 8ffa27e9-6a3b-48d1-aed4-c808089788d9 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1917.344452] env[62510]: DEBUG oslo_vmware.api [None req-910259fa-3130-450b-9ba5-2c4ecd069649 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769531, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.469239] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquiring lock "refresh_cache-cf4160a8-1160-45fc-b9e5-e9526b6c1506" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1917.469485] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquired lock "refresh_cache-cf4160a8-1160-45fc-b9e5-e9526b6c1506" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1917.469697] env[62510]: DEBUG nova.network.neutron [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1917.496185] env[62510]: DEBUG nova.scheduler.client.report [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1917.517466] env[62510]: 
DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769538, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.457089} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1917.518423] env[62510]: DEBUG nova.network.neutron [req-5e378c7b-bef5-45f6-a30e-39322422831c req-48e40921-f589-4144-8141-f12f32b1a63b service nova] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Updated VIF entry in instance network info cache for port b0bc3c6a-5184-45d0-ab4b-e1a7ae6762b8. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1917.518771] env[62510]: DEBUG nova.network.neutron [req-5e378c7b-bef5-45f6-a30e-39322422831c req-48e40921-f589-4144-8141-f12f32b1a63b service nova] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Updating instance_info_cache with network_info: [{"id": "b0bc3c6a-5184-45d0-ab4b-e1a7ae6762b8", "address": "fa:16:3e:a4:9e:1a", "network": {"id": "dd80546c-dbc1-461a-8b4a-342b8a63957b", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-174038375-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "99bae3a9008a46349842b33ce6e41b25", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e614f8e-6b11-4b6b-a421-904bca6acd91", "external-id": "nsx-vlan-transportzone-923", "segmentation_id": 923, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0bc3c6a-51", "ovs_interfaceid": "b0bc3c6a-5184-45d0-ab4b-e1a7ae6762b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1917.520070] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c/e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1917.520327] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1917.520784] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d919b4df-131b-462d-a44a-d3e50e938cf0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.530643] env[62510]: DEBUG oslo_vmware.api [None 
req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for the task: (returnval){ [ 1917.530643] env[62510]: value = "task-1769539" [ 1917.530643] env[62510]: _type = "Task" [ 1917.530643] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1917.539536] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769539, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.664042] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52e21148-76b8-936f-526e-56e69e4ca6ae, 'name': SearchDatastore_Task, 'duration_secs': 0.067521} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1917.664411] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1917.664709] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1917.664989] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1917.665186] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1917.665401] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1917.665704] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5ad983fb-6a15-434a-8be5-39d2c8e97440 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1917.674085] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1917.674264] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1917.677107] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ce8efed-70cb-4616-a251-1c418428374d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.682806] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for the task: (returnval){ [ 1917.682806] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]527e8827-a5cd-5f7d-2bd0-583e0caa75ad" [ 1917.682806] env[62510]: _type = "Task" [ 1917.682806] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1917.692176] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]527e8827-a5cd-5f7d-2bd0-583e0caa75ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.840753] env[62510]: DEBUG oslo_vmware.api [None req-910259fa-3130-450b-9ba5-2c4ecd069649 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769531, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.003041] env[62510]: DEBUG oslo_concurrency.lockutils [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.364s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1918.003041] env[62510]: DEBUG nova.compute.manager [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1918.005751] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4b97c700-cf3a-43d9-b723-f73494283c75 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.122s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1918.006053] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4b97c700-cf3a-43d9-b723-f73494283c75 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1918.008286] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.528s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1918.011072] env[62510]: INFO nova.compute.claims [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1918.021986] env[62510]: DEBUG oslo_concurrency.lockutils [req-5e378c7b-bef5-45f6-a30e-39322422831c req-48e40921-f589-4144-8141-f12f32b1a63b service nova] Releasing lock "refresh_cache-cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1918.032537] env[62510]: INFO nova.scheduler.client.report [None req-4b97c700-cf3a-43d9-b723-f73494283c75 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Deleted allocations for instance 841460b0-d917-44ea-88c6-0e5a3022f658 [ 1918.047740] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769539, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073091} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1918.048150] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1918.049761] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e06bb54f-01f2-4321-9119-548f7e34aa83 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.076272] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c/e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1918.077123] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-060b36a6-8baf-4f3b-a6c5-20646f3e42a5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.104094] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for the task: (returnval){ [ 1918.104094] env[62510]: value = "task-1769540" [ 1918.104094] env[62510]: _type = "Task" [ 1918.104094] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1918.114508] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769540, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.195566] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]527e8827-a5cd-5f7d-2bd0-583e0caa75ad, 'name': SearchDatastore_Task, 'duration_secs': 0.009073} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1918.196437] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e2195e8-b8be-4874-98b6-90e7c9b8fed7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.199431] env[62510]: DEBUG nova.network.neutron [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Instance cache missing network info. 
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1918.203569] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for the task: (returnval){ [ 1918.203569] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52c760ed-2373-ae94-53a0-2a19676dc045" [ 1918.203569] env[62510]: _type = "Task" [ 1918.203569] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1918.213143] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52c760ed-2373-ae94-53a0-2a19676dc045, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.266712] env[62510]: DEBUG nova.network.neutron [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Updating instance_info_cache with network_info: [{"id": "022a0379-8a0f-412f-a55a-f8fcaf1102f3", "address": "fa:16:3e:fc:f0:87", "network": {"id": "3958d418-1b64-4598-975c-02b13c976ce5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1692593298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3109fa7889c64dfda2117d4cd58aa528", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ba56987-9dc3-4c76-a4e2-942b05355bdb", "external-id": "nsx-vlan-transportzone-698", "segmentation_id": 698, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap022a0379-8a", "ovs_interfaceid": "022a0379-8a0f-412f-a55a-f8fcaf1102f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1918.343193] env[62510]: DEBUG oslo_vmware.api [None req-910259fa-3130-450b-9ba5-2c4ecd069649 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769531, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.515568] env[62510]: DEBUG nova.compute.utils [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1918.520027] env[62510]: DEBUG nova.compute.manager [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1918.520232] env[62510]: DEBUG nova.network.neutron [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1918.545698] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4b97c700-cf3a-43d9-b723-f73494283c75 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "841460b0-d917-44ea-88c6-0e5a3022f658" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.813s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1918.568515] env[62510]: DEBUG nova.policy [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dbc6eced57ea45fdafc3635a58fb3611', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f878b652f01c48139bfc6996e5e32f5b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1918.616415] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769540, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.721533] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52c760ed-2373-ae94-53a0-2a19676dc045, 'name': SearchDatastore_Task, 'duration_secs': 0.032061} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1918.721852] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1918.722142] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74/cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1918.722423] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-24ec27af-52f0-473a-868f-5dc918252471 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.730803] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for the task: (returnval){ [ 1918.730803] env[62510]: value = "task-1769541" [ 1918.730803] env[62510]: _type = "Task" [ 1918.730803] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1918.740912] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769541, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.772398] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Releasing lock "refresh_cache-cf4160a8-1160-45fc-b9e5-e9526b6c1506" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1918.804435] env[62510]: DEBUG nova.compute.manager [req-9bc90e8b-e7b2-41d0-a7ba-b429b1606c30 req-32928981-5534-4188-9b6c-2f03b070b876 service nova] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Received event network-changed-4d5c92f8-54e7-4731-bc8e-a3598f21a0b6 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1918.805529] env[62510]: DEBUG nova.compute.manager [req-9bc90e8b-e7b2-41d0-a7ba-b429b1606c30 req-32928981-5534-4188-9b6c-2f03b070b876 service nova] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Refreshing instance network info cache due to event network-changed-4d5c92f8-54e7-4731-bc8e-a3598f21a0b6. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1918.805529] env[62510]: DEBUG oslo_concurrency.lockutils [req-9bc90e8b-e7b2-41d0-a7ba-b429b1606c30 req-32928981-5534-4188-9b6c-2f03b070b876 service nova] Acquiring lock "refresh_cache-5f229f78-6c5d-4170-bdd4-c5522b137949" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1918.805529] env[62510]: DEBUG oslo_concurrency.lockutils [req-9bc90e8b-e7b2-41d0-a7ba-b429b1606c30 req-32928981-5534-4188-9b6c-2f03b070b876 service nova] Acquired lock "refresh_cache-5f229f78-6c5d-4170-bdd4-c5522b137949" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1918.805529] env[62510]: DEBUG nova.network.neutron [req-9bc90e8b-e7b2-41d0-a7ba-b429b1606c30 req-32928981-5534-4188-9b6c-2f03b070b876 service nova] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Refreshing network info cache for port 4d5c92f8-54e7-4731-bc8e-a3598f21a0b6 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1918.813880] env[62510]: DEBUG nova.virt.hardware [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='e091ba027ba426c109df0ea48547a9be',container_format='bare',created_at=2024-12-11T19:42:32Z,direct_url=,disk_format='vmdk',id=da7c8e66-0047-4492-9c76-db7e729079e0,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-497036740-shelved',owner='3109fa7889c64dfda2117d4cd58aa528',properties=ImageMetaProps,protected=,size=31661568,status='active',tags=,updated_at=2024-12-11T19:42:51Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1918.814199] env[62510]: DEBUG nova.virt.hardware [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1918.814316] env[62510]: DEBUG nova.virt.hardware [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1918.816034] env[62510]: DEBUG nova.virt.hardware [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1918.816034] env[62510]: DEBUG nova.virt.hardware [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1918.816034] env[62510]: DEBUG nova.virt.hardware [None 
req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1918.816034] env[62510]: DEBUG nova.virt.hardware [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1918.816034] env[62510]: DEBUG nova.virt.hardware [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1918.816034] env[62510]: DEBUG nova.virt.hardware [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1918.816034] env[62510]: DEBUG nova.virt.hardware [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1918.816034] env[62510]: DEBUG nova.virt.hardware [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1918.817443] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2759566e-5f8d-4585-ba72-1a6264b81366 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.833424] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-331833bd-1979-48b6-a447-52beaa901f71 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.836189] env[62510]: DEBUG nova.network.neutron [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1918.858977] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fc:f0:87', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6ba56987-9dc3-4c76-a4e2-942b05355bdb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '022a0379-8a0f-412f-a55a-f8fcaf1102f3', 'vif_model': 
'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1918.868208] env[62510]: DEBUG oslo.service.loopingcall [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1918.868818] env[62510]: DEBUG oslo_vmware.api [None req-910259fa-3130-450b-9ba5-2c4ecd069649 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769531, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.869373] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1918.872052] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e95e74d0-7308-4b8b-b557-b62a92cc04d5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.894405] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1918.894405] env[62510]: value = "task-1769542" [ 1918.894405] env[62510]: _type = "Task" [ 1918.894405] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1918.903631] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769542, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.904541] env[62510]: DEBUG nova.network.neutron [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Successfully created port: 5717b8b0-bfb7-4f74-9ac3-833161c514f8 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1919.020773] env[62510]: DEBUG nova.compute.manager [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1919.120252] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769540, 'name': ReconfigVM_Task, 'duration_secs': 0.827502} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1919.123378] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Reconfigured VM instance instance-0000006c to attach disk [datastore1] e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c/e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1919.127325] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e7a544ba-0bbb-4cd5-9361-0227c623a2d2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.138403] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for the task: (returnval){ [ 1919.138403] env[62510]: value = "task-1769543" [ 1919.138403] env[62510]: _type = "Task" [ 1919.138403] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1919.157209] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769543, 'name': Rename_Task} progress is 10%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.244488] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769541, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.277822] env[62510]: DEBUG nova.network.neutron [req-9bc90e8b-e7b2-41d0-a7ba-b429b1606c30 req-32928981-5534-4188-9b6c-2f03b070b876 service nova] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Updated VIF entry in instance network info cache for port 4d5c92f8-54e7-4731-bc8e-a3598f21a0b6. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1919.281507] env[62510]: DEBUG nova.network.neutron [req-9bc90e8b-e7b2-41d0-a7ba-b429b1606c30 req-32928981-5534-4188-9b6c-2f03b070b876 service nova] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Updating instance_info_cache with network_info: [{"id": "4d5c92f8-54e7-4731-bc8e-a3598f21a0b6", "address": "fa:16:3e:ff:14:88", "network": {"id": "3b8d6085-89b4-4ce1-b2d3-a23177f0eb79", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-951886226-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de0d125bba6242d3b9614402098efc1f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d5c92f8-54", "ovs_interfaceid": "4d5c92f8-54e7-4731-bc8e-a3598f21a0b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1919.340045] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Releasing lock "refresh_cache-8ffa27e9-6a3b-48d1-aed4-c808089788d9" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1919.340318] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Updated the network info_cache for instance {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10380}} [ 1919.344648] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1919.344873] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1919.345481] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1919.345659] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1919.345883] env[62510]: DEBUG oslo_service.periodic_task [None 
req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1919.346125] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._sync_power_states {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1919.351721] env[62510]: DEBUG oslo_vmware.api [None req-910259fa-3130-450b-9ba5-2c4ecd069649 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769531, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.383042] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76a09b74-8c51-466b-9778-0a36632cccbd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.392389] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a535794-4ede-4a06-9afa-369544700762 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.407321] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769542, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.436490] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "22002fc1-647e-4e65-a5f0-c3a34575985f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1919.436747] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "22002fc1-647e-4e65-a5f0-c3a34575985f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1919.441024] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1c58f82-acff-41ff-8d6e-b0f02f2e0e62 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.447215] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1db29f8a-b367-426d-853f-c4e3de2ed362 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.461749] env[62510]: DEBUG nova.compute.provider_tree [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1919.651733] 
env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769543, 'name': Rename_Task, 'duration_secs': 0.3089} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1919.652017] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1919.652312] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7acc5935-017b-4988-9f70-fc0e8bad265a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.659438] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for the task: (returnval){ [ 1919.659438] env[62510]: value = "task-1769544" [ 1919.659438] env[62510]: _type = "Task" [ 1919.659438] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1919.667811] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769544, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.741683] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769541, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.756843} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1919.742570] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74/cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1919.742570] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1919.742836] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8b67350d-0d65-4772-af9d-88957df7ab8a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.752486] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for the task: (returnval){ [ 1919.752486] env[62510]: value = "task-1769545" [ 1919.752486] env[62510]: _type = "Task" [ 1919.752486] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1919.762197] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769545, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.782969] env[62510]: DEBUG oslo_concurrency.lockutils [req-9bc90e8b-e7b2-41d0-a7ba-b429b1606c30 req-32928981-5534-4188-9b6c-2f03b070b876 service nova] Releasing lock "refresh_cache-5f229f78-6c5d-4170-bdd4-c5522b137949" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1919.783280] env[62510]: DEBUG nova.compute.manager [req-9bc90e8b-e7b2-41d0-a7ba-b429b1606c30 req-32928981-5534-4188-9b6c-2f03b070b876 service nova] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Received event network-changed-4d5c92f8-54e7-4731-bc8e-a3598f21a0b6 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1919.783514] env[62510]: DEBUG nova.compute.manager [req-9bc90e8b-e7b2-41d0-a7ba-b429b1606c30 req-32928981-5534-4188-9b6c-2f03b070b876 service nova] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Refreshing instance network info cache due to event network-changed-4d5c92f8-54e7-4731-bc8e-a3598f21a0b6. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1919.783725] env[62510]: DEBUG oslo_concurrency.lockutils [req-9bc90e8b-e7b2-41d0-a7ba-b429b1606c30 req-32928981-5534-4188-9b6c-2f03b070b876 service nova] Acquiring lock "refresh_cache-5f229f78-6c5d-4170-bdd4-c5522b137949" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1919.783863] env[62510]: DEBUG oslo_concurrency.lockutils [req-9bc90e8b-e7b2-41d0-a7ba-b429b1606c30 req-32928981-5534-4188-9b6c-2f03b070b876 service nova] Acquired lock "refresh_cache-5f229f78-6c5d-4170-bdd4-c5522b137949" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1919.784064] env[62510]: DEBUG nova.network.neutron [req-9bc90e8b-e7b2-41d0-a7ba-b429b1606c30 req-32928981-5534-4188-9b6c-2f03b070b876 service nova] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Refreshing network info cache for port 4d5c92f8-54e7-4731-bc8e-a3598f21a0b6 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1919.852729] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Getting list of instances from cluster (obj){ [ 1919.852729] env[62510]: value = "domain-c8" [ 1919.852729] env[62510]: _type = "ClusterComputeResource" [ 1919.852729] env[62510]: } {{(pid=62510) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1919.853091] env[62510]: DEBUG oslo_vmware.api [None req-910259fa-3130-450b-9ba5-2c4ecd069649 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769531, 'name': ReconfigVM_Task, 'duration_secs': 5.961841} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1919.853970] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a360b89f-e83b-408b-99c8-a2d4e6f4f61b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.857075] env[62510]: DEBUG oslo_concurrency.lockutils [None req-910259fa-3130-450b-9ba5-2c4ecd069649 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Releasing lock "0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1919.857291] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-910259fa-3130-450b-9ba5-2c4ecd069649 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Reconfigured VM to detach interface {{(pid=62510) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1919.879316] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Got total of 13 instances {{(pid=62510) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1919.879496] env[62510]: WARNING nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] While synchronizing instance power states, found 16 instances in the database and 13 instances on the hypervisor. 
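Up to this point the capture shows the same vCenter interaction over and over: the driver invokes an asynchronous SOAP method (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, CreateVM_Task, Rename_Task, PowerOnVM_Task), gets back a Task reference, and oslo.vmware's wait_for_task polls it, producing the "_poll_task ... progress is N%" lines until "completed successfully". A minimal standalone sketch of that invoke-and-poll pattern follows; the vCenter host, credentials, datastore paths and the positional VMwareAPISession arguments are illustrative assumptions, not values taken from this log.

# Minimal sketch of the invoke-then-poll pattern seen above (not part of the captured log).
# Host, credentials and datastore paths are placeholders; the VMwareAPISession argument
# order is assumed from oslo.vmware, not from this capture.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    'vcenter.example.org',            # placeholder vCenter host
    'administrator@vsphere.local',    # placeholder user
    'secret',                         # placeholder password
    10,                               # api_retry_count (assumed positional order)
    0.5)                              # task_poll_interval in seconds

# Asynchronous vCenter calls return a Task managed object instead of a result.
disk_mgr = session.vim.service_content.virtualDiskManager
task = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task', disk_mgr,
    sourceName='[datastore1] devstack-image-cache_base/example.vmdk',   # placeholder
    destName='[datastore1] example-instance/example-instance.vmdk')     # placeholder

# wait_for_task polls TaskInfo (the repeated "progress is N%" debug lines) and
# raises an oslo.vmware exception if the task ends in an error state.
session.wait_for_task(task)

In the capture, several of these invoke-and-poll cycles run concurrently for different instances, which is why progress reports for task-1769540, task-1769541 and task-1769531 alternate in the timeline.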
[ 1919.879640] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Triggering sync for uuid 8ffa27e9-6a3b-48d1-aed4-c808089788d9 {{(pid=62510) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10719}} [ 1919.879836] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Triggering sync for uuid 4e735bb6-f167-4c2b-b44e-d2dd3040603d {{(pid=62510) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10719}} [ 1919.879999] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Triggering sync for uuid f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095 {{(pid=62510) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10719}} [ 1919.880169] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Triggering sync for uuid 2f7b02e8-f658-448f-b6e6-9bfa94c74da4 {{(pid=62510) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10719}} [ 1919.880314] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Triggering sync for uuid 8a230335-6388-45fb-a29e-9e63ddb4d5f2 {{(pid=62510) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10719}} [ 1919.880460] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Triggering sync for uuid cf4160a8-1160-45fc-b9e5-e9526b6c1506 {{(pid=62510) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10719}} [ 1919.880608] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Triggering sync for uuid 5f229f78-6c5d-4170-bdd4-c5522b137949 {{(pid=62510) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10719}} [ 1919.880751] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Triggering sync for uuid 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3 {{(pid=62510) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10719}} [ 1919.880900] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Triggering sync for uuid 92cb4e54-a00e-4974-b134-22d302932e32 {{(pid=62510) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10719}} [ 1919.881060] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Triggering sync for uuid 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c {{(pid=62510) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10719}} [ 1919.881211] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Triggering sync for uuid 72f8492b-304a-4451-ab40-4cdfe36b9e19 {{(pid=62510) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10719}} [ 1919.881355] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Triggering sync for uuid 9956e5d2-edda-47af-a3df-743ebed1154b {{(pid=62510) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10719}} [ 1919.881502] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Triggering sync for uuid e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c {{(pid=62510) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10719}} [ 1919.881640] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Triggering sync for uuid cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74 {{(pid=62510) _sync_power_states 
/opt/stack/nova/nova/compute/manager.py:10719}} [ 1919.881779] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Triggering sync for uuid 14f08e1c-bf2a-4dca-9770-8ceb311130e3 {{(pid=62510) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10719}} [ 1919.881920] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Triggering sync for uuid 11490e72-b9a5-4e8e-86c4-300c594cd914 {{(pid=62510) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10719}} [ 1919.882327] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "8ffa27e9-6a3b-48d1-aed4-c808089788d9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1919.882568] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "4e735bb6-f167-4c2b-b44e-d2dd3040603d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1919.882755] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "4e735bb6-f167-4c2b-b44e-d2dd3040603d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1919.883035] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1919.883223] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1919.883452] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "2f7b02e8-f658-448f-b6e6-9bfa94c74da4" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1919.883626] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "2f7b02e8-f658-448f-b6e6-9bfa94c74da4" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1919.883844] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "8a230335-6388-45fb-a29e-9e63ddb4d5f2" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62510) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1919.884069] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "8a230335-6388-45fb-a29e-9e63ddb4d5f2" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1919.884262] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "cf4160a8-1160-45fc-b9e5-e9526b6c1506" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1919.884499] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "5f229f78-6c5d-4170-bdd4-c5522b137949" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1919.884728] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "5f229f78-6c5d-4170-bdd4-c5522b137949" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1919.885052] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1919.885247] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1919.885478] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "92cb4e54-a00e-4974-b134-22d302932e32" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1919.885653] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "92cb4e54-a00e-4974-b134-22d302932e32" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1919.885895] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62510) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1919.886064] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1919.886291] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "72f8492b-304a-4451-ab40-4cdfe36b9e19" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1919.886463] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "72f8492b-304a-4451-ab40-4cdfe36b9e19" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1919.886826] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "9956e5d2-edda-47af-a3df-743ebed1154b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1919.887031] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "9956e5d2-edda-47af-a3df-743ebed1154b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1919.887261] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1919.887466] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1919.887660] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "14f08e1c-bf2a-4dca-9770-8ceb311130e3" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1919.887906] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "11490e72-b9a5-4e8e-86c4-300c594cd914" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} 
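The lockutils entries above record the per-instance serialization used by the _sync_power_states periodic task: for each instance UUID the task takes a named lock before comparing the hypervisor's power state with the database record, so a concurrent build, delete or migration of the same instance cannot race with the sync. A minimal sketch of that locking pattern follows, using only the public oslo_concurrency context manager; the helper callables are illustrative placeholders, not the actual Nova code:

    from oslo_concurrency import lockutils

    def sync_power_state(instance_uuid, get_driver_state, get_db_state):
        # Serialize on the instance UUID, mirroring the
        # 'Acquiring lock "<uuid>" ... acquired ... "released"' lines above.
        with lockutils.lock(instance_uuid):
            driver_state = get_driver_state(instance_uuid)  # hypothetical callable
            db_state = get_db_state(instance_uuid)          # hypothetical callable
            if driver_state != db_state:
                # Reconciliation would happen here; the log later shows Nova
                # skipping instances that still have a pending task (spawning).
                pass

The 'waited 0.000s' and 'held 0.565s' figures in the surrounding entries are emitted by the same lockutils wrapper, which times how long each caller blocked on and then held the lock.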
[ 1919.888096] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1919.888241] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62510) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 1919.891034] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7feebdea-70ae-47dd-bf17-828c3d2f6644 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.894060] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-807a16ee-652e-4cc6-a0a0-8c6ae2ef644d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.896657] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e26e06f9-7dc5-4140-989f-34f812d8e92c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.899335] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dea8510-7097-4b5a-a581-3106cb40a1c8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.901864] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a63086f-004b-4132-a362-cd0e90aff1bf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.905182] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-178f25a3-2c54-4346-a5ea-8057ac35c426 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.908119] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f412b964-0840-4cb5-8159-d82ba902fcd4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.911111] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed5b8bcf-ba7a-4416-b723-c36e31920cc7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.913954] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-090e2c00-8adb-44d0-9f9d-b4c095caa8d3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.916899] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db909294-a374-4262-a4f0-67013575e99a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.919349] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62510) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1919.939782] env[62510]: DEBUG nova.compute.manager [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1919.969012] env[62510]: WARNING oslo_messaging._drivers.amqpdriver [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Number of call queues is 11, greater than warning threshold: 10. There could be a leak. Increasing threshold to: 20 [ 1919.970665] env[62510]: DEBUG nova.scheduler.client.report [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1919.980122] env[62510]: WARNING urllib3.connectionpool [-] Connection pool is full, discarding connection: vc1.osci.c.eu-de-1.cloud.sap. Connection pool size: 10: queue.Full [ 1919.987671] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769542, 'name': CreateVM_Task, 'duration_secs': 0.833243} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1919.988122] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1919.988786] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/da7c8e66-0047-4492-9c76-db7e729079e0" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1919.988948] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquired lock "[datastore1] devstack-image-cache_base/da7c8e66-0047-4492-9c76-db7e729079e0" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1919.989350] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/da7c8e66-0047-4492-9c76-db7e729079e0" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1919.989611] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49be1204-4a02-46eb-9d90-d8caac092246 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.995456] env[62510]: DEBUG oslo_vmware.api [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1919.995456] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52173fbc-4fa6-c7eb-6fa8-89042a6b2b9e" [ 1919.995456] env[62510]: _type = "Task" [ 1919.995456] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1920.016616] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Releasing lock "[datastore1] devstack-image-cache_base/da7c8e66-0047-4492-9c76-db7e729079e0" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1920.016616] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Processing image da7c8e66-0047-4492-9c76-db7e729079e0 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1920.016616] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/da7c8e66-0047-4492-9c76-db7e729079e0/da7c8e66-0047-4492-9c76-db7e729079e0.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1920.016616] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquired lock "[datastore1] devstack-image-cache_base/da7c8e66-0047-4492-9c76-db7e729079e0/da7c8e66-0047-4492-9c76-db7e729079e0.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1920.016913] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1920.018198] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3a09986b-5490-4c53-8921-2e31dc7be47f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.033804] env[62510]: DEBUG nova.compute.manager [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1920.036468] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1920.036659] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1920.038596] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8da95868-8175-4b28-b70f-1a0ef6ab19c2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.046299] env[62510]: DEBUG oslo_vmware.api [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1920.046299] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52e35d20-d776-58c3-3b96-7a2eccd31e33" [ 1920.046299] env[62510]: _type = "Task" [ 1920.046299] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1920.054874] env[62510]: DEBUG oslo_vmware.api [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52e35d20-d776-58c3-3b96-7a2eccd31e33, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.064166] env[62510]: DEBUG nova.virt.hardware [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1920.064413] env[62510]: DEBUG nova.virt.hardware [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1920.064570] env[62510]: DEBUG nova.virt.hardware [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1920.064779] env[62510]: DEBUG nova.virt.hardware [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1920.064936] env[62510]: DEBUG nova.virt.hardware [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Image 
pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1920.065055] env[62510]: DEBUG nova.virt.hardware [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1920.065276] env[62510]: DEBUG nova.virt.hardware [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1920.065470] env[62510]: DEBUG nova.virt.hardware [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1920.065692] env[62510]: DEBUG nova.virt.hardware [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1920.065905] env[62510]: DEBUG nova.virt.hardware [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1920.066059] env[62510]: DEBUG nova.virt.hardware [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1920.066862] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c7a8d6a-f015-45bf-a0bd-597bfb92064e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.074713] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c575d497-76cc-44a1-a65e-258493ebcc5f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.169551] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769544, 'name': PowerOnVM_Task, 'duration_secs': 0.492767} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1920.169878] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1920.170118] env[62510]: INFO nova.compute.manager [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Took 8.28 seconds to spawn the instance on the hypervisor. [ 1920.170316] env[62510]: DEBUG nova.compute.manager [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1920.171110] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-892d3eca-da0c-4fea-a72f-5703b694edcd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.262665] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769545, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07107} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1920.263010] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1920.263666] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f801126-3785-471b-9b07-21669f58c6ba {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.286381] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74/cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1920.289081] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eb5a3982-5d1f-46fc-8c26-81645da21ec0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.309899] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for the task: (returnval){ [ 1920.309899] env[62510]: value = "task-1769546" [ 1920.309899] env[62510]: _type = "Task" [ 1920.309899] 
env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1920.319093] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769546, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.422300] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1920.448052] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "4e735bb6-f167-4c2b-b44e-d2dd3040603d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.565s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1920.457797] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "8a230335-6388-45fb-a29e-9e63ddb4d5f2" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.572s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1920.457797] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.575s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1920.458264] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "2f7b02e8-f658-448f-b6e6-9bfa94c74da4" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.575s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1920.468214] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1920.468598] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "9956e5d2-edda-47af-a3df-743ebed1154b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.582s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1920.471060] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.585s {{(pid=62510) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1920.474775] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.467s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1920.475279] env[62510]: DEBUG nova.compute.manager [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1920.478498] env[62510]: DEBUG oslo_concurrency.lockutils [None req-52f8b75d-5f36-4915-a771-446cc52c43fd tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.416s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1920.478725] env[62510]: DEBUG nova.objects.instance [None req-52f8b75d-5f36-4915-a771-446cc52c43fd tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lazy-loading 'resources' on Instance uuid 8ffa27e9-6a3b-48d1-aed4-c808089788d9 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1920.479791] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "72f8492b-304a-4451-ab40-4cdfe36b9e19" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.593s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1920.480100] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "5f229f78-6c5d-4170-bdd4-c5522b137949" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.595s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1920.487175] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "92cb4e54-a00e-4974-b134-22d302932e32" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.601s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1920.488619] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.603s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1920.557662] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Preparing fetch location {{(pid=62510) 
_fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1920.557985] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Fetch image to [datastore1] OSTACK_IMG_183b618f-02e2-4714-9dff-1640c570762c/OSTACK_IMG_183b618f-02e2-4714-9dff-1640c570762c.vmdk {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1920.558237] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Downloading stream optimized image da7c8e66-0047-4492-9c76-db7e729079e0 to [datastore1] OSTACK_IMG_183b618f-02e2-4714-9dff-1640c570762c/OSTACK_IMG_183b618f-02e2-4714-9dff-1640c570762c.vmdk on the data store datastore1 as vApp {{(pid=62510) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1920.558367] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Downloading image file data da7c8e66-0047-4492-9c76-db7e729079e0 to the ESX as VM named 'OSTACK_IMG_183b618f-02e2-4714-9dff-1640c570762c' {{(pid=62510) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1920.639285] env[62510]: DEBUG nova.network.neutron [req-9bc90e8b-e7b2-41d0-a7ba-b429b1606c30 req-32928981-5534-4188-9b6c-2f03b070b876 service nova] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Updated VIF entry in instance network info cache for port 4d5c92f8-54e7-4731-bc8e-a3598f21a0b6. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1920.640044] env[62510]: DEBUG nova.network.neutron [req-9bc90e8b-e7b2-41d0-a7ba-b429b1606c30 req-32928981-5534-4188-9b6c-2f03b070b876 service nova] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Updating instance_info_cache with network_info: [{"id": "4d5c92f8-54e7-4731-bc8e-a3598f21a0b6", "address": "fa:16:3e:ff:14:88", "network": {"id": "3b8d6085-89b4-4ce1-b2d3-a23177f0eb79", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-951886226-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de0d125bba6242d3b9614402098efc1f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d5c92f8-54", "ovs_interfaceid": "4d5c92f8-54e7-4731-bc8e-a3598f21a0b6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1920.649260] env[62510]: DEBUG oslo_vmware.rw_handles [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1920.649260] env[62510]: value = "resgroup-9" [ 1920.649260] env[62510]: _type = "ResourcePool" [ 1920.649260] env[62510]: }. 
{{(pid=62510) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1920.649574] env[62510]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-9d429c28-4465-4253-82f2-649035b6b100 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.671183] env[62510]: DEBUG nova.compute.manager [req-1b3e42e0-4803-4b72-bace-627b05094036 req-89f2fd30-d45c-489f-8501-ff1da289c23f service nova] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Received event network-vif-plugged-5717b8b0-bfb7-4f74-9ac3-833161c514f8 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1920.671398] env[62510]: DEBUG oslo_concurrency.lockutils [req-1b3e42e0-4803-4b72-bace-627b05094036 req-89f2fd30-d45c-489f-8501-ff1da289c23f service nova] Acquiring lock "14f08e1c-bf2a-4dca-9770-8ceb311130e3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1920.671598] env[62510]: DEBUG oslo_concurrency.lockutils [req-1b3e42e0-4803-4b72-bace-627b05094036 req-89f2fd30-d45c-489f-8501-ff1da289c23f service nova] Lock "14f08e1c-bf2a-4dca-9770-8ceb311130e3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1920.671763] env[62510]: DEBUG oslo_concurrency.lockutils [req-1b3e42e0-4803-4b72-bace-627b05094036 req-89f2fd30-d45c-489f-8501-ff1da289c23f service nova] Lock "14f08e1c-bf2a-4dca-9770-8ceb311130e3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1920.671927] env[62510]: DEBUG nova.compute.manager [req-1b3e42e0-4803-4b72-bace-627b05094036 req-89f2fd30-d45c-489f-8501-ff1da289c23f service nova] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] No waiting events found dispatching network-vif-plugged-5717b8b0-bfb7-4f74-9ac3-833161c514f8 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1920.672107] env[62510]: WARNING nova.compute.manager [req-1b3e42e0-4803-4b72-bace-627b05094036 req-89f2fd30-d45c-489f-8501-ff1da289c23f service nova] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Received unexpected event network-vif-plugged-5717b8b0-bfb7-4f74-9ac3-833161c514f8 for instance with vm_state building and task_state spawning. [ 1920.679400] env[62510]: DEBUG oslo_vmware.rw_handles [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lease: (returnval){ [ 1920.679400] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5223c792-e859-3b83-7891-2a8735b8e759" [ 1920.679400] env[62510]: _type = "HttpNfcLease" [ 1920.679400] env[62510]: } obtained for vApp import into resource pool (val){ [ 1920.679400] env[62510]: value = "resgroup-9" [ 1920.679400] env[62510]: _type = "ResourcePool" [ 1920.679400] env[62510]: }. 
{{(pid=62510) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1920.679730] env[62510]: DEBUG oslo_vmware.api [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the lease: (returnval){ [ 1920.679730] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5223c792-e859-3b83-7891-2a8735b8e759" [ 1920.679730] env[62510]: _type = "HttpNfcLease" [ 1920.679730] env[62510]: } to be ready. {{(pid=62510) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1920.690029] env[62510]: INFO nova.compute.manager [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Took 22.59 seconds to build instance. [ 1920.694302] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1920.694302] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5223c792-e859-3b83-7891-2a8735b8e759" [ 1920.694302] env[62510]: _type = "HttpNfcLease" [ 1920.694302] env[62510]: } is initializing. {{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1920.756935] env[62510]: DEBUG nova.network.neutron [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Successfully updated port: 5717b8b0-bfb7-4f74-9ac3-833161c514f8 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1920.820552] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769546, 'name': ReconfigVM_Task, 'duration_secs': 0.506738} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1920.821630] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Reconfigured VM instance instance-0000006d to attach disk [datastore1] cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74/cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1920.821789] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cb5190c5-841e-4c86-9817-64368d4e86f8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.827903] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for the task: (returnval){ [ 1920.827903] env[62510]: value = "task-1769548" [ 1920.827903] env[62510]: _type = "Task" [ 1920.827903] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1920.835836] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769548, 'name': Rename_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.982022] env[62510]: DEBUG nova.compute.utils [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1920.982022] env[62510]: DEBUG nova.compute.manager [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1920.982022] env[62510]: DEBUG nova.network.neutron [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1921.037820] env[62510]: DEBUG nova.policy [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '93399cd69f4245188fd39bde29ee3d5a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '11c021c6b45c452f83732fe578e576f6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1921.145425] env[62510]: DEBUG oslo_concurrency.lockutils [req-9bc90e8b-e7b2-41d0-a7ba-b429b1606c30 req-32928981-5534-4188-9b6c-2f03b070b876 service nova] Releasing lock "refresh_cache-5f229f78-6c5d-4170-bdd4-c5522b137949" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1921.145697] env[62510]: DEBUG nova.compute.manager [req-9bc90e8b-e7b2-41d0-a7ba-b429b1606c30 req-32928981-5534-4188-9b6c-2f03b070b876 service nova] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Received event network-vif-plugged-022a0379-8a0f-412f-a55a-f8fcaf1102f3 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1921.145949] env[62510]: DEBUG oslo_concurrency.lockutils [req-9bc90e8b-e7b2-41d0-a7ba-b429b1606c30 req-32928981-5534-4188-9b6c-2f03b070b876 service nova] Acquiring lock "cf4160a8-1160-45fc-b9e5-e9526b6c1506-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1921.146197] env[62510]: DEBUG oslo_concurrency.lockutils [req-9bc90e8b-e7b2-41d0-a7ba-b429b1606c30 req-32928981-5534-4188-9b6c-2f03b070b876 service nova] Lock "cf4160a8-1160-45fc-b9e5-e9526b6c1506-events" 
acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1921.146380] env[62510]: DEBUG oslo_concurrency.lockutils [req-9bc90e8b-e7b2-41d0-a7ba-b429b1606c30 req-32928981-5534-4188-9b6c-2f03b070b876 service nova] Lock "cf4160a8-1160-45fc-b9e5-e9526b6c1506-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1921.146523] env[62510]: DEBUG nova.compute.manager [req-9bc90e8b-e7b2-41d0-a7ba-b429b1606c30 req-32928981-5534-4188-9b6c-2f03b070b876 service nova] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] No waiting events found dispatching network-vif-plugged-022a0379-8a0f-412f-a55a-f8fcaf1102f3 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1921.146686] env[62510]: WARNING nova.compute.manager [req-9bc90e8b-e7b2-41d0-a7ba-b429b1606c30 req-32928981-5534-4188-9b6c-2f03b070b876 service nova] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Received unexpected event network-vif-plugged-022a0379-8a0f-412f-a55a-f8fcaf1102f3 for instance with vm_state shelved_offloaded and task_state spawning. [ 1921.146844] env[62510]: DEBUG nova.compute.manager [req-9bc90e8b-e7b2-41d0-a7ba-b429b1606c30 req-32928981-5534-4188-9b6c-2f03b070b876 service nova] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Received event network-changed-022a0379-8a0f-412f-a55a-f8fcaf1102f3 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1921.147015] env[62510]: DEBUG nova.compute.manager [req-9bc90e8b-e7b2-41d0-a7ba-b429b1606c30 req-32928981-5534-4188-9b6c-2f03b070b876 service nova] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Refreshing instance network info cache due to event network-changed-022a0379-8a0f-412f-a55a-f8fcaf1102f3. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1921.147203] env[62510]: DEBUG oslo_concurrency.lockutils [req-9bc90e8b-e7b2-41d0-a7ba-b429b1606c30 req-32928981-5534-4188-9b6c-2f03b070b876 service nova] Acquiring lock "refresh_cache-cf4160a8-1160-45fc-b9e5-e9526b6c1506" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1921.147335] env[62510]: DEBUG oslo_concurrency.lockutils [req-9bc90e8b-e7b2-41d0-a7ba-b429b1606c30 req-32928981-5534-4188-9b6c-2f03b070b876 service nova] Acquired lock "refresh_cache-cf4160a8-1160-45fc-b9e5-e9526b6c1506" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1921.147521] env[62510]: DEBUG nova.network.neutron [req-9bc90e8b-e7b2-41d0-a7ba-b429b1606c30 req-32928981-5534-4188-9b6c-2f03b070b876 service nova] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Refreshing network info cache for port 022a0379-8a0f-412f-a55a-f8fcaf1102f3 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1921.195354] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Lock "e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.101s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1921.196070] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1921.196070] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5223c792-e859-3b83-7891-2a8735b8e759" [ 1921.196070] env[62510]: _type = "HttpNfcLease" [ 1921.196070] env[62510]: } is ready. {{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1921.196691] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 1.309s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1921.196946] env[62510]: INFO nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] During sync_power_state the instance has a pending task (spawning). Skip. [ 1921.197225] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1921.197540] env[62510]: DEBUG oslo_vmware.rw_handles [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1921.197540] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5223c792-e859-3b83-7891-2a8735b8e759" [ 1921.197540] env[62510]: _type = "HttpNfcLease" [ 1921.197540] env[62510]: }. 
{{(pid=62510) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1921.198431] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58d5da09-850b-4128-9af6-5b22323c5e4f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.206871] env[62510]: DEBUG oslo_vmware.rw_handles [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ee26cb-3e12-f783-7470-6092b0e2f137/disk-0.vmdk from lease info. {{(pid=62510) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1921.206960] env[62510]: DEBUG oslo_vmware.rw_handles [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Creating HTTP connection to write to file with size = 31661568 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ee26cb-3e12-f783-7470-6092b0e2f137/disk-0.vmdk. {{(pid=62510) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1921.279320] env[62510]: DEBUG oslo_concurrency.lockutils [None req-910259fa-3130-450b-9ba5-2c4ecd069649 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "refresh_cache-0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1921.279597] env[62510]: DEBUG oslo_concurrency.lockutils [None req-910259fa-3130-450b-9ba5-2c4ecd069649 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquired lock "refresh_cache-0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1921.279968] env[62510]: DEBUG nova.network.neutron [None req-910259fa-3130-450b-9ba5-2c4ecd069649 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1921.281392] env[62510]: DEBUG oslo_concurrency.lockutils [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "refresh_cache-14f08e1c-bf2a-4dca-9770-8ceb311130e3" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1921.281555] env[62510]: DEBUG oslo_concurrency.lockutils [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquired lock "refresh_cache-14f08e1c-bf2a-4dca-9770-8ceb311130e3" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1921.281731] env[62510]: DEBUG nova.network.neutron [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Building network info cache for instance {{(pid=62510) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1921.292551] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-1eee62b9-cd45-4849-b114-5dfd3ae31501 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.342233] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769548, 'name': Rename_Task, 'duration_secs': 0.129373} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1921.342625] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1921.343172] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c0bfc461-17dd-4f7b-9f6d-814e062994eb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.347818] env[62510]: DEBUG nova.network.neutron [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Successfully created port: b582dbce-50e8-4781-89ae-5c8667be6584 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1921.351945] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for the task: (returnval){ [ 1921.351945] env[62510]: value = "task-1769549" [ 1921.351945] env[62510]: _type = "Task" [ 1921.351945] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1921.353054] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-145d6982-4e4a-42fd-ac45-c2e62a0f3f38 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.363310] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769549, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.365984] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42314e6d-adce-47b9-8779-3d4fb33aed8f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.401128] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3db7998-a428-4195-926d-4c106dd291b7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.411871] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bc48ba2-6954-4493-909b-ab07d5dec640 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.428954] env[62510]: DEBUG nova.compute.provider_tree [None req-52f8b75d-5f36-4915-a771-446cc52c43fd tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1921.492158] env[62510]: DEBUG nova.compute.manager [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1921.869877] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769549, 'name': PowerOnVM_Task} progress is 94%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.879158] env[62510]: DEBUG nova.network.neutron [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1921.893229] env[62510]: DEBUG nova.network.neutron [req-9bc90e8b-e7b2-41d0-a7ba-b429b1606c30 req-32928981-5534-4188-9b6c-2f03b070b876 service nova] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Updated VIF entry in instance network info cache for port 022a0379-8a0f-412f-a55a-f8fcaf1102f3. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1921.893229] env[62510]: DEBUG nova.network.neutron [req-9bc90e8b-e7b2-41d0-a7ba-b429b1606c30 req-32928981-5534-4188-9b6c-2f03b070b876 service nova] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Updating instance_info_cache with network_info: [{"id": "022a0379-8a0f-412f-a55a-f8fcaf1102f3", "address": "fa:16:3e:fc:f0:87", "network": {"id": "3958d418-1b64-4598-975c-02b13c976ce5", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1692593298-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3109fa7889c64dfda2117d4cd58aa528", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6ba56987-9dc3-4c76-a4e2-942b05355bdb", "external-id": "nsx-vlan-transportzone-698", "segmentation_id": 698, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap022a0379-8a", "ovs_interfaceid": "022a0379-8a0f-412f-a55a-f8fcaf1102f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1921.932258] env[62510]: DEBUG nova.scheduler.client.report [None req-52f8b75d-5f36-4915-a771-446cc52c43fd tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1922.262305] env[62510]: DEBUG nova.network.neutron [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Updating instance_info_cache with network_info: [{"id": "5717b8b0-bfb7-4f74-9ac3-833161c514f8", "address": "fa:16:3e:93:b1:3c", "network": {"id": "de9186ec-ac4f-4ac0-8499-037f92e28197", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-164983974-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f878b652f01c48139bfc6996e5e32f5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "191a5351-07d5-4138-b855-206f48fc4375", 
"external-id": "nsx-vlan-transportzone-939", "segmentation_id": 939, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5717b8b0-bf", "ovs_interfaceid": "5717b8b0-bfb7-4f74-9ac3-833161c514f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1922.363172] env[62510]: INFO nova.network.neutron [None req-910259fa-3130-450b-9ba5-2c4ecd069649 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Port 98a986f8-1515-4f07-aee2-94ce84796db0 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1922.363735] env[62510]: DEBUG nova.network.neutron [None req-910259fa-3130-450b-9ba5-2c4ecd069649 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Updating instance_info_cache with network_info: [{"id": "f1d12594-5d5a-4965-a017-3b055a432283", "address": "fa:16:3e:e6:3b:d1", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1d12594-5d", "ovs_interfaceid": "f1d12594-5d5a-4965-a017-3b055a432283", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1922.368063] env[62510]: DEBUG oslo_vmware.api [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769549, 'name': PowerOnVM_Task, 'duration_secs': 0.588837} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1922.368543] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1922.368940] env[62510]: INFO nova.compute.manager [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Took 8.04 seconds to spawn the instance on the hypervisor. 
[ 1922.368940] env[62510]: DEBUG nova.compute.manager [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1922.369859] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c29e6fb-c9d3-4eef-b4c2-36be63586ff7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.399946] env[62510]: DEBUG oslo_concurrency.lockutils [req-9bc90e8b-e7b2-41d0-a7ba-b429b1606c30 req-32928981-5534-4188-9b6c-2f03b070b876 service nova] Releasing lock "refresh_cache-cf4160a8-1160-45fc-b9e5-e9526b6c1506" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1922.438732] env[62510]: DEBUG oslo_concurrency.lockutils [None req-52f8b75d-5f36-4915-a771-446cc52c43fd tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.960s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1922.441469] env[62510]: DEBUG oslo_concurrency.lockutils [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.339s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1922.443163] env[62510]: INFO nova.compute.claims [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1922.472792] env[62510]: INFO nova.scheduler.client.report [None req-52f8b75d-5f36-4915-a771-446cc52c43fd tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Deleted allocations for instance 8ffa27e9-6a3b-48d1-aed4-c808089788d9 [ 1922.505637] env[62510]: DEBUG nova.compute.manager [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1922.540038] env[62510]: DEBUG nova.virt.hardware [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1922.540354] env[62510]: DEBUG nova.virt.hardware [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1922.540558] env[62510]: DEBUG nova.virt.hardware [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1922.540709] env[62510]: DEBUG nova.virt.hardware [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1922.540860] env[62510]: DEBUG nova.virt.hardware [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1922.541016] env[62510]: DEBUG nova.virt.hardware [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1922.541285] env[62510]: DEBUG nova.virt.hardware [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1922.541456] env[62510]: DEBUG nova.virt.hardware [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1922.541629] env[62510]: DEBUG 
nova.virt.hardware [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1922.541796] env[62510]: DEBUG nova.virt.hardware [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1922.541963] env[62510]: DEBUG nova.virt.hardware [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1922.542910] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad7e28dd-e3c8-47c8-ac0b-0655e992f516 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.555535] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7be93248-bfdf-4246-ae46-566a110f8416 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.578430] env[62510]: DEBUG oslo_vmware.rw_handles [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Completed reading data from the image iterator. {{(pid=62510) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1922.578742] env[62510]: DEBUG oslo_vmware.rw_handles [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ee26cb-3e12-f783-7470-6092b0e2f137/disk-0.vmdk. {{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1922.579706] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b63d079-5e17-468a-9f57-e86211d321bd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.587052] env[62510]: DEBUG oslo_vmware.rw_handles [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ee26cb-3e12-f783-7470-6092b0e2f137/disk-0.vmdk is in state: ready. {{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1922.587246] env[62510]: DEBUG oslo_vmware.rw_handles [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ee26cb-3e12-f783-7470-6092b0e2f137/disk-0.vmdk. 
{{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1922.587497] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-71d0dddb-feaf-4b4d-93e1-7c58bd8e03cc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.690750] env[62510]: DEBUG nova.compute.manager [req-9e0566f2-1d77-44b8-a130-7f3f78726ef4 req-57f0781c-7107-4520-9554-5edf29698201 service nova] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Received event network-changed-f1d12594-5d5a-4965-a017-3b055a432283 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1922.691567] env[62510]: DEBUG nova.compute.manager [req-9e0566f2-1d77-44b8-a130-7f3f78726ef4 req-57f0781c-7107-4520-9554-5edf29698201 service nova] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Refreshing instance network info cache due to event network-changed-f1d12594-5d5a-4965-a017-3b055a432283. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1922.691861] env[62510]: DEBUG oslo_concurrency.lockutils [req-9e0566f2-1d77-44b8-a130-7f3f78726ef4 req-57f0781c-7107-4520-9554-5edf29698201 service nova] Acquiring lock "refresh_cache-0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1922.710644] env[62510]: DEBUG nova.compute.manager [req-683ea907-ba19-4d9c-9b94-8c158561f9b3 req-b9a2264d-5e30-41b8-bcfa-7d72733c52e5 service nova] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Received event network-changed-5717b8b0-bfb7-4f74-9ac3-833161c514f8 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1922.710944] env[62510]: DEBUG nova.compute.manager [req-683ea907-ba19-4d9c-9b94-8c158561f9b3 req-b9a2264d-5e30-41b8-bcfa-7d72733c52e5 service nova] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Refreshing instance network info cache due to event network-changed-5717b8b0-bfb7-4f74-9ac3-833161c514f8. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1922.711180] env[62510]: DEBUG oslo_concurrency.lockutils [req-683ea907-ba19-4d9c-9b94-8c158561f9b3 req-b9a2264d-5e30-41b8-bcfa-7d72733c52e5 service nova] Acquiring lock "refresh_cache-14f08e1c-bf2a-4dca-9770-8ceb311130e3" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1922.766443] env[62510]: DEBUG oslo_concurrency.lockutils [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Releasing lock "refresh_cache-14f08e1c-bf2a-4dca-9770-8ceb311130e3" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1922.766895] env[62510]: DEBUG nova.compute.manager [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Instance network_info: |[{"id": "5717b8b0-bfb7-4f74-9ac3-833161c514f8", "address": "fa:16:3e:93:b1:3c", "network": {"id": "de9186ec-ac4f-4ac0-8499-037f92e28197", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-164983974-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f878b652f01c48139bfc6996e5e32f5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "191a5351-07d5-4138-b855-206f48fc4375", "external-id": "nsx-vlan-transportzone-939", "segmentation_id": 939, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5717b8b0-bf", "ovs_interfaceid": "5717b8b0-bfb7-4f74-9ac3-833161c514f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1922.767289] env[62510]: DEBUG oslo_concurrency.lockutils [req-683ea907-ba19-4d9c-9b94-8c158561f9b3 req-b9a2264d-5e30-41b8-bcfa-7d72733c52e5 service nova] Acquired lock "refresh_cache-14f08e1c-bf2a-4dca-9770-8ceb311130e3" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1922.767550] env[62510]: DEBUG nova.network.neutron [req-683ea907-ba19-4d9c-9b94-8c158561f9b3 req-b9a2264d-5e30-41b8-bcfa-7d72733c52e5 service nova] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Refreshing network info cache for port 5717b8b0-bfb7-4f74-9ac3-833161c514f8 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1922.768836] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:93:b1:3c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '191a5351-07d5-4138-b855-206f48fc4375', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'5717b8b0-bfb7-4f74-9ac3-833161c514f8', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1922.776720] env[62510]: DEBUG oslo.service.loopingcall [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1922.778411] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1922.778411] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0002d06e-b1e2-4d76-b955-63b8989c5af1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.799812] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1922.799812] env[62510]: value = "task-1769550" [ 1922.799812] env[62510]: _type = "Task" [ 1922.799812] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1922.811467] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769550, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.870308] env[62510]: DEBUG oslo_concurrency.lockutils [None req-910259fa-3130-450b-9ba5-2c4ecd069649 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Releasing lock "refresh_cache-0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1922.872416] env[62510]: DEBUG oslo_concurrency.lockutils [req-9e0566f2-1d77-44b8-a130-7f3f78726ef4 req-57f0781c-7107-4520-9554-5edf29698201 service nova] Acquired lock "refresh_cache-0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1922.872708] env[62510]: DEBUG nova.network.neutron [req-9e0566f2-1d77-44b8-a130-7f3f78726ef4 req-57f0781c-7107-4520-9554-5edf29698201 service nova] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Refreshing network info cache for port f1d12594-5d5a-4965-a017-3b055a432283 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1922.892553] env[62510]: INFO nova.compute.manager [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Took 24.76 seconds to build instance. [ 1922.922924] env[62510]: DEBUG oslo_vmware.rw_handles [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ee26cb-3e12-f783-7470-6092b0e2f137/disk-0.vmdk. 
{{(pid=62510) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1922.922924] env[62510]: INFO nova.virt.vmwareapi.images [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Downloaded image file data da7c8e66-0047-4492-9c76-db7e729079e0 [ 1922.923850] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ab411a6-7308-48dd-ad92-8d46ca4b5b62 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.941753] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-547ac85e-196e-472f-8cba-e95064a441b8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.956602] env[62510]: DEBUG nova.network.neutron [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Successfully updated port: b582dbce-50e8-4781-89ae-5c8667be6584 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1922.967865] env[62510]: INFO nova.virt.vmwareapi.images [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] The imported VM was unregistered [ 1922.970354] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Caching image {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1922.970609] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Creating directory with path [datastore1] devstack-image-cache_base/da7c8e66-0047-4492-9c76-db7e729079e0 {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1922.971463] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7d3eabc1-2d0e-4810-8f27-437697648eea {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.980766] env[62510]: DEBUG oslo_concurrency.lockutils [None req-52f8b75d-5f36-4915-a771-446cc52c43fd tempest-ServersTestJSON-938961669 tempest-ServersTestJSON-938961669-project-member] Lock "8ffa27e9-6a3b-48d1-aed4-c808089788d9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.579s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1922.984420] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "8ffa27e9-6a3b-48d1-aed4-c808089788d9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 3.102s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1922.984553] env[62510]: DEBUG 
oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e97ebd7c-3b3e-4347-9a2a-41e2a8f4a699 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.989198] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Created directory with path [datastore1] devstack-image-cache_base/da7c8e66-0047-4492-9c76-db7e729079e0 {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1922.989438] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_183b618f-02e2-4714-9dff-1640c570762c/OSTACK_IMG_183b618f-02e2-4714-9dff-1640c570762c.vmdk to [datastore1] devstack-image-cache_base/da7c8e66-0047-4492-9c76-db7e729079e0/da7c8e66-0047-4492-9c76-db7e729079e0.vmdk. {{(pid=62510) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1922.990551] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-05e4b0a6-12ae-44d1-adcc-c90f4c7a4c32 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.995796] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af8ebbb6-6f6f-46c4-a302-910365790b0f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.011486] env[62510]: DEBUG oslo_vmware.api [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1923.011486] env[62510]: value = "task-1769552" [ 1923.011486] env[62510]: _type = "Task" [ 1923.011486] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1923.022023] env[62510]: DEBUG oslo_vmware.api [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769552, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1923.310957] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769550, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1923.376023] env[62510]: DEBUG oslo_concurrency.lockutils [None req-910259fa-3130-450b-9ba5-2c4ecd069649 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "interface-0f6e9363-47ac-481e-bc1c-b8f4f9748d9c-98a986f8-1515-4f07-aee2-94ce84796db0" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.123s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1923.392220] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5ce766e8-7177-4778-adbb-844926a60cd9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "interface-72f8492b-304a-4451-ab40-4cdfe36b9e19-98a986f8-1515-4f07-aee2-94ce84796db0" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1923.392498] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5ce766e8-7177-4778-adbb-844926a60cd9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "interface-72f8492b-304a-4451-ab40-4cdfe36b9e19-98a986f8-1515-4f07-aee2-94ce84796db0" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1923.392985] env[62510]: DEBUG nova.objects.instance [None req-5ce766e8-7177-4778-adbb-844926a60cd9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lazy-loading 'flavor' on Instance uuid 72f8492b-304a-4451-ab40-4cdfe36b9e19 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1923.397385] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24203eec-154e-44da-b573-bddb232eda11 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Lock "cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.276s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1923.398028] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 3.510s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1923.398149] env[62510]: INFO nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 1923.398300] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1923.459407] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquiring lock "refresh_cache-11490e72-b9a5-4e8e-86c4-300c594cd914" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1923.459620] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquired lock "refresh_cache-11490e72-b9a5-4e8e-86c4-300c594cd914" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1923.459819] env[62510]: DEBUG nova.network.neutron [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1923.503135] env[62510]: DEBUG nova.network.neutron [req-683ea907-ba19-4d9c-9b94-8c158561f9b3 req-b9a2264d-5e30-41b8-bcfa-7d72733c52e5 service nova] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Updated VIF entry in instance network info cache for port 5717b8b0-bfb7-4f74-9ac3-833161c514f8. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1923.503467] env[62510]: DEBUG nova.network.neutron [req-683ea907-ba19-4d9c-9b94-8c158561f9b3 req-b9a2264d-5e30-41b8-bcfa-7d72733c52e5 service nova] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Updating instance_info_cache with network_info: [{"id": "5717b8b0-bfb7-4f74-9ac3-833161c514f8", "address": "fa:16:3e:93:b1:3c", "network": {"id": "de9186ec-ac4f-4ac0-8499-037f92e28197", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-164983974-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f878b652f01c48139bfc6996e5e32f5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "191a5351-07d5-4138-b855-206f48fc4375", "external-id": "nsx-vlan-transportzone-939", "segmentation_id": 939, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5717b8b0-bf", "ovs_interfaceid": "5717b8b0-bfb7-4f74-9ac3-833161c514f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1923.527316] env[62510]: DEBUG oslo_vmware.api [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769552, 'name': MoveVirtualDisk_Task} progress is 12%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1923.536241] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "8ffa27e9-6a3b-48d1-aed4-c808089788d9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.552s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1923.761855] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-407520fd-7da0-4c38-8736-b795683d82f3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.772618] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfeb7aab-119d-40d1-82c3-438991d3a1d6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.813123] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e8effab-51e1-4fd4-9c16-5ce22aa5357e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.825058] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7c3560f-7eaf-449a-b5fb-5434773dd6cc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.828988] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769550, 'name': CreateVM_Task, 'duration_secs': 0.526783} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1923.829200] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1923.830331] env[62510]: DEBUG oslo_concurrency.lockutils [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1923.830496] env[62510]: DEBUG oslo_concurrency.lockutils [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1923.830827] env[62510]: DEBUG oslo_concurrency.lockutils [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1923.831125] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d036634-6dd0-4456-966e-98f27d1ea282 {{(pid=62510) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.841612] env[62510]: DEBUG nova.compute.provider_tree [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1923.844331] env[62510]: DEBUG oslo_vmware.api [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1923.844331] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52b63bd2-8454-6c2b-db80-bf54f9b70e0a" [ 1923.844331] env[62510]: _type = "Task" [ 1923.844331] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1923.852804] env[62510]: DEBUG oslo_vmware.api [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52b63bd2-8454-6c2b-db80-bf54f9b70e0a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1923.904129] env[62510]: DEBUG nova.network.neutron [req-9e0566f2-1d77-44b8-a130-7f3f78726ef4 req-57f0781c-7107-4520-9554-5edf29698201 service nova] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Updated VIF entry in instance network info cache for port f1d12594-5d5a-4965-a017-3b055a432283. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1923.904475] env[62510]: DEBUG nova.network.neutron [req-9e0566f2-1d77-44b8-a130-7f3f78726ef4 req-57f0781c-7107-4520-9554-5edf29698201 service nova] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Updating instance_info_cache with network_info: [{"id": "f1d12594-5d5a-4965-a017-3b055a432283", "address": "fa:16:3e:e6:3b:d1", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1d12594-5d", "ovs_interfaceid": "f1d12594-5d5a-4965-a017-3b055a432283", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1924.000569] env[62510]: DEBUG nova.objects.instance [None req-5ce766e8-7177-4778-adbb-844926a60cd9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] 
Lazy-loading 'pci_requests' on Instance uuid 72f8492b-304a-4451-ab40-4cdfe36b9e19 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1924.005624] env[62510]: DEBUG oslo_concurrency.lockutils [req-683ea907-ba19-4d9c-9b94-8c158561f9b3 req-b9a2264d-5e30-41b8-bcfa-7d72733c52e5 service nova] Releasing lock "refresh_cache-14f08e1c-bf2a-4dca-9770-8ceb311130e3" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1924.016721] env[62510]: DEBUG nova.network.neutron [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1924.025470] env[62510]: DEBUG oslo_vmware.api [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769552, 'name': MoveVirtualDisk_Task} progress is 35%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1924.327672] env[62510]: DEBUG nova.network.neutron [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Updating instance_info_cache with network_info: [{"id": "b582dbce-50e8-4781-89ae-5c8667be6584", "address": "fa:16:3e:80:db:8b", "network": {"id": "e420cc26-6a46-4189-b24c-78c39b6b4d50", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-234097015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11c021c6b45c452f83732fe578e576f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6eb7e3e9-5cc2-40f1-a6eb-f70f06531667", "external-id": "nsx-vlan-transportzone-938", "segmentation_id": 938, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb582dbce-50", "ovs_interfaceid": "b582dbce-50e8-4781-89ae-5c8667be6584", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1924.346055] env[62510]: DEBUG nova.scheduler.client.report [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1924.364635] env[62510]: DEBUG oslo_vmware.api [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52b63bd2-8454-6c2b-db80-bf54f9b70e0a, 'name': SearchDatastore_Task, 'duration_secs': 0.088635} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1924.364949] env[62510]: DEBUG oslo_concurrency.lockutils [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1924.365227] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1924.365508] env[62510]: DEBUG oslo_concurrency.lockutils [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1924.365693] env[62510]: DEBUG oslo_concurrency.lockutils [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1924.365998] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1924.366562] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4af93d88-1c19-49b8-afd6-65e02a5540fb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.384628] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1924.384923] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1924.385698] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d99e67a-43d9-4abf-a97f-3411c095595d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.392473] env[62510]: DEBUG oslo_vmware.api [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1924.392473] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]520ea8b0-7ac0-7019-694f-682b996bb60e" [ 1924.392473] env[62510]: _type = "Task" [ 1924.392473] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1924.401772] env[62510]: DEBUG oslo_vmware.api [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]520ea8b0-7ac0-7019-694f-682b996bb60e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1924.407499] env[62510]: DEBUG oslo_concurrency.lockutils [req-9e0566f2-1d77-44b8-a130-7f3f78726ef4 req-57f0781c-7107-4520-9554-5edf29698201 service nova] Releasing lock "refresh_cache-0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1924.503480] env[62510]: DEBUG nova.objects.base [None req-5ce766e8-7177-4778-adbb-844926a60cd9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Object Instance<72f8492b-304a-4451-ab40-4cdfe36b9e19> lazy-loaded attributes: flavor,pci_requests {{(pid=62510) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1924.504342] env[62510]: DEBUG nova.network.neutron [None req-5ce766e8-7177-4778-adbb-844926a60cd9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1924.523390] env[62510]: DEBUG oslo_vmware.api [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769552, 'name': MoveVirtualDisk_Task} progress is 54%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1924.628220] env[62510]: DEBUG nova.policy [None req-5ce766e8-7177-4778-adbb-844926a60cd9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '112bb5174a71476f9aaa66e917fc135a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cca414b18f8d431786c155d359f1325d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1924.830829] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Releasing lock "refresh_cache-11490e72-b9a5-4e8e-86c4-300c594cd914" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1924.831518] env[62510]: DEBUG nova.compute.manager [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Instance network_info: |[{"id": "b582dbce-50e8-4781-89ae-5c8667be6584", "address": "fa:16:3e:80:db:8b", "network": {"id": "e420cc26-6a46-4189-b24c-78c39b6b4d50", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-234097015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11c021c6b45c452f83732fe578e576f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6eb7e3e9-5cc2-40f1-a6eb-f70f06531667", "external-id": "nsx-vlan-transportzone-938", "segmentation_id": 938, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb582dbce-50", "ovs_interfaceid": "b582dbce-50e8-4781-89ae-5c8667be6584", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1924.831663] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:80:db:8b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6eb7e3e9-5cc2-40f1-a6eb-f70f06531667', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b582dbce-50e8-4781-89ae-5c8667be6584', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1924.840021] env[62510]: DEBUG oslo.service.loopingcall [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 
tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1924.840176] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1924.840813] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cec4a507-fb25-4670-8ea6-363f128ba8b1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.857254] env[62510]: DEBUG oslo_concurrency.lockutils [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.416s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1924.857832] env[62510]: DEBUG nova.compute.manager [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1924.860547] env[62510]: DEBUG oslo_concurrency.lockutils [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.559s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1924.861992] env[62510]: INFO nova.compute.claims [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1924.871977] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1924.871977] env[62510]: value = "task-1769553" [ 1924.871977] env[62510]: _type = "Task" [ 1924.871977] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1924.885038] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769553, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1924.904061] env[62510]: DEBUG oslo_vmware.api [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]520ea8b0-7ac0-7019-694f-682b996bb60e, 'name': SearchDatastore_Task, 'duration_secs': 0.082265} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1924.904974] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-684dddc1-59af-4580-8ae5-65f5d2cd4bd3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.912767] env[62510]: DEBUG oslo_vmware.api [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1924.912767] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]526481fd-add3-06d7-e858-5d15f4feb2e4" [ 1924.912767] env[62510]: _type = "Task" [ 1924.912767] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1924.922711] env[62510]: DEBUG oslo_vmware.api [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]526481fd-add3-06d7-e858-5d15f4feb2e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.023307] env[62510]: DEBUG oslo_vmware.api [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769552, 'name': MoveVirtualDisk_Task} progress is 77%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.048719] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3669172f-2e90-490e-af2f-b598baaa02e5 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquiring lock "e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1925.049033] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3669172f-2e90-490e-af2f-b598baaa02e5 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Lock "e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1925.049293] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3669172f-2e90-490e-af2f-b598baaa02e5 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquiring lock "e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1925.049489] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3669172f-2e90-490e-af2f-b598baaa02e5 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Lock "e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1925.049662] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3669172f-2e90-490e-af2f-b598baaa02e5 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Lock "e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1925.052166] env[62510]: INFO nova.compute.manager [None req-3669172f-2e90-490e-af2f-b598baaa02e5 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Terminating instance [ 1925.141450] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d3c9912f-9a21-4398-9c64-97b5b80ef9a0 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquiring lock "cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1925.141774] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d3c9912f-9a21-4398-9c64-97b5b80ef9a0 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Lock "cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1925.142023] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d3c9912f-9a21-4398-9c64-97b5b80ef9a0 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquiring lock "cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1925.142192] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d3c9912f-9a21-4398-9c64-97b5b80ef9a0 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Lock "cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1925.142368] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d3c9912f-9a21-4398-9c64-97b5b80ef9a0 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Lock "cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1925.144752] env[62510]: INFO nova.compute.manager [None req-d3c9912f-9a21-4398-9c64-97b5b80ef9a0 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Terminating instance [ 1925.197660] env[62510]: DEBUG nova.compute.manager [req-5e776488-e592-41d7-a1f8-fd5ba4222c6c req-03ba8e42-9db0-49e6-9276-0871c3222447 service nova] 
[instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Received event network-changed-348ebdec-3667-4eea-b76e-5356163db2f9 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1925.197869] env[62510]: DEBUG nova.compute.manager [req-5e776488-e592-41d7-a1f8-fd5ba4222c6c req-03ba8e42-9db0-49e6-9276-0871c3222447 service nova] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Refreshing instance network info cache due to event network-changed-348ebdec-3667-4eea-b76e-5356163db2f9. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1925.198341] env[62510]: DEBUG oslo_concurrency.lockutils [req-5e776488-e592-41d7-a1f8-fd5ba4222c6c req-03ba8e42-9db0-49e6-9276-0871c3222447 service nova] Acquiring lock "refresh_cache-72f8492b-304a-4451-ab40-4cdfe36b9e19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1925.198534] env[62510]: DEBUG oslo_concurrency.lockutils [req-5e776488-e592-41d7-a1f8-fd5ba4222c6c req-03ba8e42-9db0-49e6-9276-0871c3222447 service nova] Acquired lock "refresh_cache-72f8492b-304a-4451-ab40-4cdfe36b9e19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1925.198664] env[62510]: DEBUG nova.network.neutron [req-5e776488-e592-41d7-a1f8-fd5ba4222c6c req-03ba8e42-9db0-49e6-9276-0871c3222447 service nova] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Refreshing network info cache for port 348ebdec-3667-4eea-b76e-5356163db2f9 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1925.218998] env[62510]: DEBUG nova.compute.manager [req-f7c1308d-b3b5-4a8e-8708-cdaf073dbff1 req-5f1add8c-5e81-42da-a73a-8b9c319ba814 service nova] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Received event network-vif-plugged-b582dbce-50e8-4781-89ae-5c8667be6584 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1925.219249] env[62510]: DEBUG oslo_concurrency.lockutils [req-f7c1308d-b3b5-4a8e-8708-cdaf073dbff1 req-5f1add8c-5e81-42da-a73a-8b9c319ba814 service nova] Acquiring lock "11490e72-b9a5-4e8e-86c4-300c594cd914-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1925.219455] env[62510]: DEBUG oslo_concurrency.lockutils [req-f7c1308d-b3b5-4a8e-8708-cdaf073dbff1 req-5f1add8c-5e81-42da-a73a-8b9c319ba814 service nova] Lock "11490e72-b9a5-4e8e-86c4-300c594cd914-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1925.219660] env[62510]: DEBUG oslo_concurrency.lockutils [req-f7c1308d-b3b5-4a8e-8708-cdaf073dbff1 req-5f1add8c-5e81-42da-a73a-8b9c319ba814 service nova] Lock "11490e72-b9a5-4e8e-86c4-300c594cd914-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1925.220164] env[62510]: DEBUG nova.compute.manager [req-f7c1308d-b3b5-4a8e-8708-cdaf073dbff1 req-5f1add8c-5e81-42da-a73a-8b9c319ba814 service nova] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] No waiting events found dispatching network-vif-plugged-b582dbce-50e8-4781-89ae-5c8667be6584 {{(pid=62510) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 1925.221624] env[62510]: WARNING nova.compute.manager [req-f7c1308d-b3b5-4a8e-8708-cdaf073dbff1 req-5f1add8c-5e81-42da-a73a-8b9c319ba814 service nova] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Received unexpected event network-vif-plugged-b582dbce-50e8-4781-89ae-5c8667be6584 for instance with vm_state building and task_state spawning. [ 1925.221831] env[62510]: DEBUG nova.compute.manager [req-f7c1308d-b3b5-4a8e-8708-cdaf073dbff1 req-5f1add8c-5e81-42da-a73a-8b9c319ba814 service nova] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Received event network-changed-b582dbce-50e8-4781-89ae-5c8667be6584 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1925.222123] env[62510]: DEBUG nova.compute.manager [req-f7c1308d-b3b5-4a8e-8708-cdaf073dbff1 req-5f1add8c-5e81-42da-a73a-8b9c319ba814 service nova] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Refreshing instance network info cache due to event network-changed-b582dbce-50e8-4781-89ae-5c8667be6584. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1925.222213] env[62510]: DEBUG oslo_concurrency.lockutils [req-f7c1308d-b3b5-4a8e-8708-cdaf073dbff1 req-5f1add8c-5e81-42da-a73a-8b9c319ba814 service nova] Acquiring lock "refresh_cache-11490e72-b9a5-4e8e-86c4-300c594cd914" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1925.222433] env[62510]: DEBUG oslo_concurrency.lockutils [req-f7c1308d-b3b5-4a8e-8708-cdaf073dbff1 req-5f1add8c-5e81-42da-a73a-8b9c319ba814 service nova] Acquired lock "refresh_cache-11490e72-b9a5-4e8e-86c4-300c594cd914" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1925.222506] env[62510]: DEBUG nova.network.neutron [req-f7c1308d-b3b5-4a8e-8708-cdaf073dbff1 req-5f1add8c-5e81-42da-a73a-8b9c319ba814 service nova] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Refreshing network info cache for port b582dbce-50e8-4781-89ae-5c8667be6584 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1925.369513] env[62510]: DEBUG nova.compute.utils [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1925.371715] env[62510]: DEBUG nova.compute.manager [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1925.371916] env[62510]: DEBUG nova.network.neutron [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1925.393246] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769553, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.423062] env[62510]: DEBUG oslo_vmware.api [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]526481fd-add3-06d7-e858-5d15f4feb2e4, 'name': SearchDatastore_Task, 'duration_secs': 0.093485} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1925.423371] env[62510]: DEBUG oslo_concurrency.lockutils [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1925.423710] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 14f08e1c-bf2a-4dca-9770-8ceb311130e3/14f08e1c-bf2a-4dca-9770-8ceb311130e3.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1925.423996] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5918118e-f607-4be8-b083-50f9be1340bd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.431977] env[62510]: DEBUG oslo_vmware.api [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1925.431977] env[62510]: value = "task-1769554" [ 1925.431977] env[62510]: _type = "Task" [ 1925.431977] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1925.442014] env[62510]: DEBUG oslo_vmware.api [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769554, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.443675] env[62510]: DEBUG nova.policy [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ec7e707afafa4638a5efd4757efbca1f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fa6ed026a1264d02abe75467127bae99', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1925.524661] env[62510]: DEBUG oslo_vmware.api [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769552, 'name': MoveVirtualDisk_Task} progress is 97%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.557201] env[62510]: DEBUG nova.compute.manager [None req-3669172f-2e90-490e-af2f-b598baaa02e5 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1925.557451] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3669172f-2e90-490e-af2f-b598baaa02e5 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1925.558450] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1434832e-39ac-489a-96d9-df26d8ffe2f8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.565985] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3669172f-2e90-490e-af2f-b598baaa02e5 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1925.566374] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a193f079-2264-4ef1-a763-4477e6fe0bc5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.572844] env[62510]: DEBUG oslo_vmware.api [None req-3669172f-2e90-490e-af2f-b598baaa02e5 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for the task: (returnval){ [ 1925.572844] env[62510]: value = "task-1769555" [ 1925.572844] env[62510]: _type = "Task" [ 1925.572844] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1925.584032] env[62510]: DEBUG oslo_vmware.api [None req-3669172f-2e90-490e-af2f-b598baaa02e5 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769555, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.649417] env[62510]: DEBUG nova.compute.manager [None req-d3c9912f-9a21-4398-9c64-97b5b80ef9a0 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1925.650089] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d3c9912f-9a21-4398-9c64-97b5b80ef9a0 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1925.651131] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5697b4bc-3008-4b6b-aa42-006603fc4f8d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.661715] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3c9912f-9a21-4398-9c64-97b5b80ef9a0 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1925.662333] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c3de8f1e-398f-4588-99c9-306b3b326d1b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.668914] env[62510]: DEBUG oslo_vmware.api [None req-d3c9912f-9a21-4398-9c64-97b5b80ef9a0 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for the task: (returnval){ [ 1925.668914] env[62510]: value = "task-1769556" [ 1925.668914] env[62510]: _type = "Task" [ 1925.668914] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1925.677756] env[62510]: DEBUG oslo_vmware.api [None req-d3c9912f-9a21-4398-9c64-97b5b80ef9a0 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769556, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.879021] env[62510]: DEBUG nova.compute.manager [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1925.896149] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769553, 'name': CreateVM_Task, 'duration_secs': 0.600474} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1925.896149] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1925.896149] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1925.896149] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1925.896149] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1925.896149] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19e050f9-28aa-42fb-8a2c-8be8e7b57a71 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.900533] env[62510]: DEBUG oslo_vmware.api [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 1925.900533] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]528ce8e7-e9c4-9c5b-d987-234932b26144" [ 1925.900533] env[62510]: _type = "Task" [ 1925.900533] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1925.911110] env[62510]: DEBUG oslo_vmware.api [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]528ce8e7-e9c4-9c5b-d987-234932b26144, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.949120] env[62510]: DEBUG oslo_vmware.api [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769554, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.950265] env[62510]: DEBUG nova.network.neutron [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Successfully created port: 54c12d32-3e2e-4ec3-a6a0-de7c5219efcc {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1926.027779] env[62510]: DEBUG oslo_vmware.api [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769552, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.664983} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1926.032257] env[62510]: INFO nova.virt.vmwareapi.ds_util [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_183b618f-02e2-4714-9dff-1640c570762c/OSTACK_IMG_183b618f-02e2-4714-9dff-1640c570762c.vmdk to [datastore1] devstack-image-cache_base/da7c8e66-0047-4492-9c76-db7e729079e0/da7c8e66-0047-4492-9c76-db7e729079e0.vmdk. [ 1926.032534] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Cleaning up location [datastore1] OSTACK_IMG_183b618f-02e2-4714-9dff-1640c570762c {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1926.032746] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_183b618f-02e2-4714-9dff-1640c570762c {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1926.033096] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8281f4ab-28f1-451a-afa9-d8439893e92f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.044080] env[62510]: DEBUG oslo_vmware.api [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1926.044080] env[62510]: value = "task-1769557" [ 1926.044080] env[62510]: _type = "Task" [ 1926.044080] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.056602] env[62510]: DEBUG oslo_vmware.api [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769557, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.088860] env[62510]: DEBUG oslo_vmware.api [None req-3669172f-2e90-490e-af2f-b598baaa02e5 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769555, 'name': PowerOffVM_Task, 'duration_secs': 0.198024} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1926.089476] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3669172f-2e90-490e-af2f-b598baaa02e5 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1926.089701] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3669172f-2e90-490e-af2f-b598baaa02e5 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1926.090047] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aa49b0a9-d977-4fe3-a9c6-537b527e7613 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.096749] env[62510]: DEBUG nova.network.neutron [req-5e776488-e592-41d7-a1f8-fd5ba4222c6c req-03ba8e42-9db0-49e6-9276-0871c3222447 service nova] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Updated VIF entry in instance network info cache for port 348ebdec-3667-4eea-b76e-5356163db2f9. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1926.097178] env[62510]: DEBUG nova.network.neutron [req-5e776488-e592-41d7-a1f8-fd5ba4222c6c req-03ba8e42-9db0-49e6-9276-0871c3222447 service nova] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Updating instance_info_cache with network_info: [{"id": "348ebdec-3667-4eea-b76e-5356163db2f9", "address": "fa:16:3e:eb:0c:99", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.135", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap348ebdec-36", "ovs_interfaceid": "348ebdec-3667-4eea-b76e-5356163db2f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1926.102705] env[62510]: DEBUG nova.network.neutron 
[req-f7c1308d-b3b5-4a8e-8708-cdaf073dbff1 req-5f1add8c-5e81-42da-a73a-8b9c319ba814 service nova] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Updated VIF entry in instance network info cache for port b582dbce-50e8-4781-89ae-5c8667be6584. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1926.103068] env[62510]: DEBUG nova.network.neutron [req-f7c1308d-b3b5-4a8e-8708-cdaf073dbff1 req-5f1add8c-5e81-42da-a73a-8b9c319ba814 service nova] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Updating instance_info_cache with network_info: [{"id": "b582dbce-50e8-4781-89ae-5c8667be6584", "address": "fa:16:3e:80:db:8b", "network": {"id": "e420cc26-6a46-4189-b24c-78c39b6b4d50", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-234097015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11c021c6b45c452f83732fe578e576f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6eb7e3e9-5cc2-40f1-a6eb-f70f06531667", "external-id": "nsx-vlan-transportzone-938", "segmentation_id": 938, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb582dbce-50", "ovs_interfaceid": "b582dbce-50e8-4781-89ae-5c8667be6584", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1926.182209] env[62510]: DEBUG oslo_vmware.api [None req-d3c9912f-9a21-4398-9c64-97b5b80ef9a0 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769556, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.186401] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87b5b587-86b7-471f-9d1c-f75fb6a45069 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.194150] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2733069e-485d-4a06-8483-ea757320283e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.226127] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64fa681e-ab79-4861-9b6e-1ab207d4cd91 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.233824] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6343acf-b918-41cb-a587-afcfddc7cb54 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.252018] env[62510]: DEBUG nova.compute.provider_tree [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1926.254389] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3669172f-2e90-490e-af2f-b598baaa02e5 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1926.254618] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3669172f-2e90-490e-af2f-b598baaa02e5 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1926.254907] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-3669172f-2e90-490e-af2f-b598baaa02e5 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Deleting the datastore file [datastore1] e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1926.255410] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6065e3ce-a546-4e42-a999-975f745c6915 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.262078] env[62510]: DEBUG oslo_vmware.api [None req-3669172f-2e90-490e-af2f-b598baaa02e5 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for the task: (returnval){ [ 1926.262078] env[62510]: value = "task-1769559" [ 1926.262078] env[62510]: _type = "Task" [ 1926.262078] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.274057] env[62510]: DEBUG oslo_vmware.api [None req-3669172f-2e90-490e-af2f-b598baaa02e5 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769559, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.414028] env[62510]: DEBUG oslo_vmware.api [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]528ce8e7-e9c4-9c5b-d987-234932b26144, 'name': SearchDatastore_Task, 'duration_secs': 0.051456} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1926.414383] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1926.414881] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1926.415158] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1926.415314] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1926.415495] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1926.416031] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6cddd504-7ee5-4697-ad11-b9c0afd119e9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.428496] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) 
mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1926.428684] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1926.429503] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87598441-52a9-4365-b0bc-b9b1cde245f9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.440071] env[62510]: DEBUG oslo_vmware.api [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 1926.440071] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52621be7-8698-7488-1bad-7134032c9cea" [ 1926.440071] env[62510]: _type = "Task" [ 1926.440071] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.443842] env[62510]: DEBUG oslo_vmware.api [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769554, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.649423} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1926.446593] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 14f08e1c-bf2a-4dca-9770-8ceb311130e3/14f08e1c-bf2a-4dca-9770-8ceb311130e3.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1926.446814] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1926.447076] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e7967bd8-46c9-4b19-b946-7c4a3141d8c2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.453339] env[62510]: DEBUG oslo_vmware.api [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52621be7-8698-7488-1bad-7134032c9cea, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.456036] env[62510]: DEBUG oslo_vmware.api [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1926.456036] env[62510]: value = "task-1769560" [ 1926.456036] env[62510]: _type = "Task" [ 1926.456036] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.464222] env[62510]: DEBUG oslo_vmware.api [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769560, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.554051] env[62510]: DEBUG oslo_vmware.api [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769557, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.053295} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1926.554447] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1926.554552] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Releasing lock "[datastore1] devstack-image-cache_base/da7c8e66-0047-4492-9c76-db7e729079e0/da7c8e66-0047-4492-9c76-db7e729079e0.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1926.554951] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/da7c8e66-0047-4492-9c76-db7e729079e0/da7c8e66-0047-4492-9c76-db7e729079e0.vmdk to [datastore1] cf4160a8-1160-45fc-b9e5-e9526b6c1506/cf4160a8-1160-45fc-b9e5-e9526b6c1506.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1926.555367] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-19aecc98-6b7e-478e-8b6a-324b0e3de924 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.561443] env[62510]: DEBUG oslo_vmware.api [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1926.561443] env[62510]: value = "task-1769561" [ 1926.561443] env[62510]: _type = "Task" [ 1926.561443] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.573942] env[62510]: DEBUG oslo_vmware.api [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769561, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.599694] env[62510]: DEBUG oslo_concurrency.lockutils [req-5e776488-e592-41d7-a1f8-fd5ba4222c6c req-03ba8e42-9db0-49e6-9276-0871c3222447 service nova] Releasing lock "refresh_cache-72f8492b-304a-4451-ab40-4cdfe36b9e19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1926.606019] env[62510]: DEBUG oslo_concurrency.lockutils [req-f7c1308d-b3b5-4a8e-8708-cdaf073dbff1 req-5f1add8c-5e81-42da-a73a-8b9c319ba814 service nova] Releasing lock "refresh_cache-11490e72-b9a5-4e8e-86c4-300c594cd914" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1926.677080] env[62510]: DEBUG nova.network.neutron [None req-5ce766e8-7177-4778-adbb-844926a60cd9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Successfully updated port: 98a986f8-1515-4f07-aee2-94ce84796db0 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1926.685949] env[62510]: DEBUG oslo_vmware.api [None req-d3c9912f-9a21-4398-9c64-97b5b80ef9a0 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769556, 'name': PowerOffVM_Task, 'duration_secs': 0.968677} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1926.685949] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3c9912f-9a21-4398-9c64-97b5b80ef9a0 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1926.685949] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d3c9912f-9a21-4398-9c64-97b5b80ef9a0 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1926.686238] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b2c417fd-321a-4908-b8f4-d447a5aaa75b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.757321] env[62510]: DEBUG nova.scheduler.client.report [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1926.767318] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d3c9912f-9a21-4398-9c64-97b5b80ef9a0 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1926.767533] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d3c9912f-9a21-4398-9c64-97b5b80ef9a0 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1926.767711] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3c9912f-9a21-4398-9c64-97b5b80ef9a0 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Deleting the datastore file [datastore1] cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1926.768524] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-81e660f0-6902-4acd-bb96-1b91eae6a29e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.773824] env[62510]: DEBUG oslo_vmware.api [None req-3669172f-2e90-490e-af2f-b598baaa02e5 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769559, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148665} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1926.774933] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-3669172f-2e90-490e-af2f-b598baaa02e5 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1926.775158] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3669172f-2e90-490e-af2f-b598baaa02e5 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1926.775370] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3669172f-2e90-490e-af2f-b598baaa02e5 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1926.775555] env[62510]: INFO nova.compute.manager [None req-3669172f-2e90-490e-af2f-b598baaa02e5 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1926.775791] env[62510]: DEBUG oslo.service.loopingcall [None req-3669172f-2e90-490e-af2f-b598baaa02e5 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1926.776296] env[62510]: DEBUG nova.compute.manager [-] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1926.776355] env[62510]: DEBUG nova.network.neutron [-] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1926.779938] env[62510]: DEBUG oslo_vmware.api [None req-d3c9912f-9a21-4398-9c64-97b5b80ef9a0 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for the task: (returnval){ [ 1926.779938] env[62510]: value = "task-1769563" [ 1926.779938] env[62510]: _type = "Task" [ 1926.779938] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.787663] env[62510]: DEBUG oslo_vmware.api [None req-d3c9912f-9a21-4398-9c64-97b5b80ef9a0 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769563, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.893874] env[62510]: DEBUG nova.compute.manager [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1926.915271] env[62510]: DEBUG nova.virt.hardware [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1926.915532] env[62510]: DEBUG nova.virt.hardware [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1926.915689] env[62510]: DEBUG nova.virt.hardware [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1926.915869] env[62510]: DEBUG nova.virt.hardware [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1926.916053] env[62510]: DEBUG nova.virt.hardware [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1926.916218] env[62510]: DEBUG nova.virt.hardware [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1926.916430] env[62510]: DEBUG nova.virt.hardware [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1926.916589] env[62510]: DEBUG nova.virt.hardware [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1926.916754] env[62510]: DEBUG nova.virt.hardware [None 
req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1926.916916] env[62510]: DEBUG nova.virt.hardware [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1926.917116] env[62510]: DEBUG nova.virt.hardware [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1926.918018] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c91cad89-ee83-42a3-a0f2-a53678f21890 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.926389] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb0bad56-54a1-4869-ba58-3688d9f2970c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.953118] env[62510]: DEBUG oslo_vmware.api [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52621be7-8698-7488-1bad-7134032c9cea, 'name': SearchDatastore_Task, 'duration_secs': 0.016354} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1926.953954] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ab86ddd-c5ec-4cbb-b289-6e588c0863a8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.960989] env[62510]: DEBUG oslo_vmware.api [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 1926.960989] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]525967f5-c233-718a-3b04-aaf14bb44175" [ 1926.960989] env[62510]: _type = "Task" [ 1926.960989] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.968807] env[62510]: DEBUG oslo_vmware.api [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769560, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069784} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1926.969581] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1926.970404] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a641bdd2-0dab-4356-900e-6caf2e849979 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.975387] env[62510]: DEBUG oslo_vmware.api [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]525967f5-c233-718a-3b04-aaf14bb44175, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.995722] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Reconfiguring VM instance instance-0000006e to attach disk [datastore1] 14f08e1c-bf2a-4dca-9770-8ceb311130e3/14f08e1c-bf2a-4dca-9770-8ceb311130e3.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1926.996029] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-394ad4a9-2521-430d-aa7c-b1469bc917c2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.019440] env[62510]: DEBUG oslo_vmware.api [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1927.019440] env[62510]: value = "task-1769564" [ 1927.019440] env[62510]: _type = "Task" [ 1927.019440] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1927.027412] env[62510]: DEBUG oslo_vmware.api [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769564, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.075349] env[62510]: DEBUG oslo_vmware.api [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769561, 'name': CopyVirtualDisk_Task} progress is 9%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.180686] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5ce766e8-7177-4778-adbb-844926a60cd9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "refresh_cache-72f8492b-304a-4451-ab40-4cdfe36b9e19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1927.180686] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5ce766e8-7177-4778-adbb-844926a60cd9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquired lock "refresh_cache-72f8492b-304a-4451-ab40-4cdfe36b9e19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1927.180686] env[62510]: DEBUG nova.network.neutron [None req-5ce766e8-7177-4778-adbb-844926a60cd9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1927.259152] env[62510]: DEBUG nova.compute.manager [req-7d1ba5f5-354b-4cf8-9cd4-12d2405676c7 req-c884eca9-cd4c-4a4f-8ec5-24136eaf0a26 service nova] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Received event network-vif-plugged-98a986f8-1515-4f07-aee2-94ce84796db0 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1927.259551] env[62510]: DEBUG oslo_concurrency.lockutils [req-7d1ba5f5-354b-4cf8-9cd4-12d2405676c7 req-c884eca9-cd4c-4a4f-8ec5-24136eaf0a26 service nova] Acquiring lock "72f8492b-304a-4451-ab40-4cdfe36b9e19-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1927.259826] env[62510]: DEBUG oslo_concurrency.lockutils [req-7d1ba5f5-354b-4cf8-9cd4-12d2405676c7 req-c884eca9-cd4c-4a4f-8ec5-24136eaf0a26 service nova] Lock "72f8492b-304a-4451-ab40-4cdfe36b9e19-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1927.260078] env[62510]: DEBUG oslo_concurrency.lockutils [req-7d1ba5f5-354b-4cf8-9cd4-12d2405676c7 req-c884eca9-cd4c-4a4f-8ec5-24136eaf0a26 service nova] Lock "72f8492b-304a-4451-ab40-4cdfe36b9e19-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1927.260302] env[62510]: DEBUG nova.compute.manager [req-7d1ba5f5-354b-4cf8-9cd4-12d2405676c7 req-c884eca9-cd4c-4a4f-8ec5-24136eaf0a26 service nova] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] No waiting events found dispatching network-vif-plugged-98a986f8-1515-4f07-aee2-94ce84796db0 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1927.260454] env[62510]: WARNING nova.compute.manager [req-7d1ba5f5-354b-4cf8-9cd4-12d2405676c7 req-c884eca9-cd4c-4a4f-8ec5-24136eaf0a26 service nova] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Received unexpected event network-vif-plugged-98a986f8-1515-4f07-aee2-94ce84796db0 for instance with vm_state active and task_state None. 
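The lock tracing above ("Acquiring lock ...", 'acquired by ...', '"released" by ... :: held Ns') is emitted by oslo.concurrency. A minimal sketch of the two call patterns that produce it, with placeholder lock names and empty critical sections (neither is taken from this log):

    from oslo_concurrency import lockutils

    # Decorator form: the wrapped call logs 'Lock "<name>" acquired by "<func>"
    # :: waited Ns' and '"released" by ... :: held Ns' from lockutils' inner().
    @lockutils.synchronized('compute_resources')
    def claim_resources():
        pass  # critical section, e.g. a resource-tracker claim

    # Context-manager form: logs 'Acquiring lock "<name>"' / 'Acquired lock' /
    # 'Releasing lock', as seen for the refresh_cache-<instance-uuid> locks.
    with lockutils.lock('refresh_cache-<instance-uuid>'):
        pass  # e.g. rebuild the instance network info cache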
[ 1927.260617] env[62510]: DEBUG nova.compute.manager [req-7d1ba5f5-354b-4cf8-9cd4-12d2405676c7 req-c884eca9-cd4c-4a4f-8ec5-24136eaf0a26 service nova] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Received event network-changed-98a986f8-1515-4f07-aee2-94ce84796db0 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1927.260811] env[62510]: DEBUG nova.compute.manager [req-7d1ba5f5-354b-4cf8-9cd4-12d2405676c7 req-c884eca9-cd4c-4a4f-8ec5-24136eaf0a26 service nova] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Refreshing instance network info cache due to event network-changed-98a986f8-1515-4f07-aee2-94ce84796db0. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1927.260941] env[62510]: DEBUG oslo_concurrency.lockutils [req-7d1ba5f5-354b-4cf8-9cd4-12d2405676c7 req-c884eca9-cd4c-4a4f-8ec5-24136eaf0a26 service nova] Acquiring lock "refresh_cache-72f8492b-304a-4451-ab40-4cdfe36b9e19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1927.263021] env[62510]: DEBUG oslo_concurrency.lockutils [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.403s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1927.263481] env[62510]: DEBUG nova.compute.manager [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1927.265998] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 6.844s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1927.266195] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1927.266484] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62510) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1927.266644] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.799s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1927.268165] env[62510]: INFO nova.compute.claims [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1927.272076] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f244e796-706c-4198-90ab-100ed54f3462 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.286982] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed1ddb48-f1d2-4199-8406-9255b77ea742 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.306765] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d79f90f9-f742-47c3-8884-5a768826fd6c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.309503] env[62510]: DEBUG oslo_vmware.api [None req-d3c9912f-9a21-4398-9c64-97b5b80ef9a0 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769563, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.317563] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2df3f324-6260-46f8-beef-4aface2058a2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.353149] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179429MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=62510) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1927.353341] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1927.473383] env[62510]: DEBUG oslo_vmware.api [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]525967f5-c233-718a-3b04-aaf14bb44175, 'name': SearchDatastore_Task, 'duration_secs': 0.02006} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1927.473808] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1927.474402] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 11490e72-b9a5-4e8e-86c4-300c594cd914/11490e72-b9a5-4e8e-86c4-300c594cd914.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1927.474778] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3434e83d-bde4-4795-8ff3-4ded8f4c31de {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.486973] env[62510]: DEBUG oslo_vmware.api [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 1927.486973] env[62510]: value = "task-1769565" [ 1927.486973] env[62510]: _type = "Task" [ 1927.486973] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1927.498352] env[62510]: DEBUG oslo_vmware.api [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769565, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.518787] env[62510]: DEBUG oslo_concurrency.lockutils [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Acquiring lock "7b2bcec4-6df7-4591-ac02-9da04d185756" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1927.519134] env[62510]: DEBUG oslo_concurrency.lockutils [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Lock "7b2bcec4-6df7-4591-ac02-9da04d185756" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1927.541543] env[62510]: DEBUG oslo_vmware.api [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769564, 'name': ReconfigVM_Task, 'duration_secs': 0.324821} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1927.541917] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Reconfigured VM instance instance-0000006e to attach disk [datastore1] 14f08e1c-bf2a-4dca-9770-8ceb311130e3/14f08e1c-bf2a-4dca-9770-8ceb311130e3.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1927.542725] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4cd794ec-ab04-4910-88bd-8503dba82c52 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.545410] env[62510]: DEBUG nova.network.neutron [-] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1927.552443] env[62510]: DEBUG oslo_vmware.api [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1927.552443] env[62510]: value = "task-1769566" [ 1927.552443] env[62510]: _type = "Task" [ 1927.552443] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1927.565400] env[62510]: DEBUG oslo_vmware.api [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769566, 'name': Rename_Task} progress is 6%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.578505] env[62510]: DEBUG oslo_vmware.api [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769561, 'name': CopyVirtualDisk_Task} progress is 29%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.733133] env[62510]: WARNING nova.network.neutron [None req-5ce766e8-7177-4778-adbb-844926a60cd9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] 940be04f-b555-4383-aaf8-63734d94a773 already exists in list: networks containing: ['940be04f-b555-4383-aaf8-63734d94a773']. ignoring it [ 1927.760283] env[62510]: DEBUG nova.network.neutron [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Successfully updated port: 54c12d32-3e2e-4ec3-a6a0-de7c5219efcc {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1927.777888] env[62510]: DEBUG nova.compute.utils [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1927.780655] env[62510]: DEBUG nova.compute.manager [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1927.781369] env[62510]: DEBUG nova.network.neutron [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1927.785148] env[62510]: DEBUG nova.compute.manager [req-3a5593ab-cf6d-4033-a1c5-c0dc33eff7e2 req-26a4d9dc-4cfe-43e5-95fa-74e763837c6b service nova] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Received event network-vif-plugged-54c12d32-3e2e-4ec3-a6a0-de7c5219efcc {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1927.785362] env[62510]: DEBUG oslo_concurrency.lockutils [req-3a5593ab-cf6d-4033-a1c5-c0dc33eff7e2 req-26a4d9dc-4cfe-43e5-95fa-74e763837c6b service nova] Acquiring lock "a5a9c086-6ae2-4644-acfa-7c147593b8d2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1927.785699] env[62510]: DEBUG oslo_concurrency.lockutils [req-3a5593ab-cf6d-4033-a1c5-c0dc33eff7e2 req-26a4d9dc-4cfe-43e5-95fa-74e763837c6b service nova] Lock "a5a9c086-6ae2-4644-acfa-7c147593b8d2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1927.785764] env[62510]: DEBUG oslo_concurrency.lockutils [req-3a5593ab-cf6d-4033-a1c5-c0dc33eff7e2 req-26a4d9dc-4cfe-43e5-95fa-74e763837c6b service nova] Lock "a5a9c086-6ae2-4644-acfa-7c147593b8d2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1927.785881] env[62510]: DEBUG nova.compute.manager [req-3a5593ab-cf6d-4033-a1c5-c0dc33eff7e2 req-26a4d9dc-4cfe-43e5-95fa-74e763837c6b service nova] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] No waiting events found dispatching network-vif-plugged-54c12d32-3e2e-4ec3-a6a0-de7c5219efcc {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1927.786058] env[62510]: WARNING nova.compute.manager [req-3a5593ab-cf6d-4033-a1c5-c0dc33eff7e2 req-26a4d9dc-4cfe-43e5-95fa-74e763837c6b service nova] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Received unexpected event network-vif-plugged-54c12d32-3e2e-4ec3-a6a0-de7c5219efcc for instance with vm_state building and task_state spawning. [ 1927.799039] env[62510]: DEBUG oslo_vmware.api [None req-d3c9912f-9a21-4398-9c64-97b5b80ef9a0 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Task: {'id': task-1769563, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.993221} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1927.799255] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3c9912f-9a21-4398-9c64-97b5b80ef9a0 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1927.799444] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d3c9912f-9a21-4398-9c64-97b5b80ef9a0 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1927.799623] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d3c9912f-9a21-4398-9c64-97b5b80ef9a0 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1927.800193] env[62510]: INFO nova.compute.manager [None req-d3c9912f-9a21-4398-9c64-97b5b80ef9a0 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Took 2.15 seconds to destroy the instance on the hypervisor. [ 1927.800193] env[62510]: DEBUG oslo.service.loopingcall [None req-d3c9912f-9a21-4398-9c64-97b5b80ef9a0 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1927.800360] env[62510]: DEBUG nova.compute.manager [-] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1927.800360] env[62510]: DEBUG nova.network.neutron [-] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1927.861885] env[62510]: DEBUG nova.policy [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6decc076b3da4d1b86c6aa73f1cf2674', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '86abf24d608d4c438161dc0b8335dea1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1928.000715] env[62510]: DEBUG oslo_vmware.api [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769565, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.021367] env[62510]: DEBUG nova.compute.manager [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1928.050137] env[62510]: INFO nova.compute.manager [-] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Took 1.27 seconds to deallocate network for instance. [ 1928.067298] env[62510]: DEBUG oslo_vmware.api [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769566, 'name': Rename_Task, 'duration_secs': 0.160348} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1928.071539] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1928.073875] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e43c3203-87b2-4ae3-9887-cb63d0bf0a18 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.085492] env[62510]: DEBUG oslo_vmware.api [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769561, 'name': CopyVirtualDisk_Task} progress is 52%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.087203] env[62510]: DEBUG oslo_vmware.api [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1928.087203] env[62510]: value = "task-1769567" [ 1928.087203] env[62510]: _type = "Task" [ 1928.087203] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1928.097414] env[62510]: DEBUG oslo_vmware.api [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769567, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.199201] env[62510]: DEBUG nova.network.neutron [None req-5ce766e8-7177-4778-adbb-844926a60cd9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Updating instance_info_cache with network_info: [{"id": "348ebdec-3667-4eea-b76e-5356163db2f9", "address": "fa:16:3e:eb:0c:99", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.135", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap348ebdec-36", "ovs_interfaceid": "348ebdec-3667-4eea-b76e-5356163db2f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "98a986f8-1515-4f07-aee2-94ce84796db0", "address": "fa:16:3e:c5:ae:21", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98a986f8-15", "ovs_interfaceid": "98a986f8-1515-4f07-aee2-94ce84796db0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1928.263753] env[62510]: DEBUG oslo_concurrency.lockutils [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Acquiring lock "refresh_cache-a5a9c086-6ae2-4644-acfa-7c147593b8d2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1928.263900] env[62510]: DEBUG oslo_concurrency.lockutils [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Acquired lock 
"refresh_cache-a5a9c086-6ae2-4644-acfa-7c147593b8d2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1928.264195] env[62510]: DEBUG nova.network.neutron [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1928.284862] env[62510]: DEBUG nova.compute.manager [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1928.313175] env[62510]: DEBUG nova.network.neutron [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Successfully created port: 405ea0bb-7824-446f-8b19-9d455a30b449 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1928.504841] env[62510]: DEBUG oslo_vmware.api [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769565, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.543346] env[62510]: DEBUG oslo_concurrency.lockutils [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1928.561296] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3669172f-2e90-490e-af2f-b598baaa02e5 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1928.583818] env[62510]: DEBUG oslo_vmware.api [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769561, 'name': CopyVirtualDisk_Task} progress is 71%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.602977] env[62510]: DEBUG oslo_vmware.api [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769567, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.612724] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e857bda-3b6a-49fd-993d-5cdecd45ab36 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.622529] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd70db72-fd47-42ac-b530-92f8c8e5392c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.658318] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc761207-d8e8-4ab1-8e18-d1d3312ae4e3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.666823] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a3cc877-9309-4dfe-a622-5649488c0c5a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.681600] env[62510]: DEBUG nova.compute.provider_tree [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1928.702256] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5ce766e8-7177-4778-adbb-844926a60cd9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Releasing lock "refresh_cache-72f8492b-304a-4451-ab40-4cdfe36b9e19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1928.703063] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5ce766e8-7177-4778-adbb-844926a60cd9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "72f8492b-304a-4451-ab40-4cdfe36b9e19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1928.703296] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5ce766e8-7177-4778-adbb-844926a60cd9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquired lock "72f8492b-304a-4451-ab40-4cdfe36b9e19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1928.703613] env[62510]: DEBUG oslo_concurrency.lockutils [req-7d1ba5f5-354b-4cf8-9cd4-12d2405676c7 req-c884eca9-cd4c-4a4f-8ec5-24136eaf0a26 service nova] Acquired lock "refresh_cache-72f8492b-304a-4451-ab40-4cdfe36b9e19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1928.703811] env[62510]: DEBUG nova.network.neutron [req-7d1ba5f5-354b-4cf8-9cd4-12d2405676c7 req-c884eca9-cd4c-4a4f-8ec5-24136eaf0a26 service nova] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Refreshing network info cache for port 98a986f8-1515-4f07-aee2-94ce84796db0 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1928.706035] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a92ba2d3-b0f6-4237-b3f3-1c4fb1033c0f 
{{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.725989] env[62510]: DEBUG nova.virt.hardware [None req-5ce766e8-7177-4778-adbb-844926a60cd9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1928.726279] env[62510]: DEBUG nova.virt.hardware [None req-5ce766e8-7177-4778-adbb-844926a60cd9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1928.726441] env[62510]: DEBUG nova.virt.hardware [None req-5ce766e8-7177-4778-adbb-844926a60cd9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1928.726616] env[62510]: DEBUG nova.virt.hardware [None req-5ce766e8-7177-4778-adbb-844926a60cd9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1928.726761] env[62510]: DEBUG nova.virt.hardware [None req-5ce766e8-7177-4778-adbb-844926a60cd9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1928.726977] env[62510]: DEBUG nova.virt.hardware [None req-5ce766e8-7177-4778-adbb-844926a60cd9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1928.727135] env[62510]: DEBUG nova.virt.hardware [None req-5ce766e8-7177-4778-adbb-844926a60cd9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1928.727300] env[62510]: DEBUG nova.virt.hardware [None req-5ce766e8-7177-4778-adbb-844926a60cd9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1928.727465] env[62510]: DEBUG nova.virt.hardware [None req-5ce766e8-7177-4778-adbb-844926a60cd9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Got 1 possible 
topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1928.727626] env[62510]: DEBUG nova.virt.hardware [None req-5ce766e8-7177-4778-adbb-844926a60cd9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1928.727796] env[62510]: DEBUG nova.virt.hardware [None req-5ce766e8-7177-4778-adbb-844926a60cd9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1928.734255] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5ce766e8-7177-4778-adbb-844926a60cd9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Reconfiguring VM to attach interface {{(pid=62510) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1928.735037] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5f287efb-df15-49a2-8573-44408621163e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.753223] env[62510]: DEBUG oslo_vmware.api [None req-5ce766e8-7177-4778-adbb-844926a60cd9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1928.753223] env[62510]: value = "task-1769568" [ 1928.753223] env[62510]: _type = "Task" [ 1928.753223] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1928.762291] env[62510]: DEBUG oslo_vmware.api [None req-5ce766e8-7177-4778-adbb-844926a60cd9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769568, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.768507] env[62510]: DEBUG nova.network.neutron [-] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1928.803781] env[62510]: DEBUG nova.network.neutron [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Instance cache missing network info. 
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1928.961392] env[62510]: DEBUG nova.network.neutron [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Updating instance_info_cache with network_info: [{"id": "54c12d32-3e2e-4ec3-a6a0-de7c5219efcc", "address": "fa:16:3e:9f:80:f5", "network": {"id": "dfd8964c-0225-4df4-815d-ef7af9be1790", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2123884413-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "fa6ed026a1264d02abe75467127bae99", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fd77ecbc-aaaf-45f4-ae8f-977d90e4052f", "external-id": "nsx-vlan-transportzone-171", "segmentation_id": 171, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54c12d32-3e", "ovs_interfaceid": "54c12d32-3e2e-4ec3-a6a0-de7c5219efcc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1928.997748] env[62510]: DEBUG oslo_vmware.api [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769565, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.078883] env[62510]: DEBUG oslo_vmware.api [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769561, 'name': CopyVirtualDisk_Task} progress is 91%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.098030] env[62510]: DEBUG oslo_vmware.api [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769567, 'name': PowerOnVM_Task, 'duration_secs': 0.519232} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1929.098357] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1929.098570] env[62510]: INFO nova.compute.manager [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Took 9.06 seconds to spawn the instance on the hypervisor. 
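The CopyVirtualDisk_Task, PowerOnVM_Task and ReconfigVM_Task entries above all follow the same pattern: oslo.vmware's wait_for_task/_poll_task submit a vCenter task and then poll it, logging "progress is N%" until the task either completes successfully (with a duration_secs) or errors out. The sketch below is a minimal, generic illustration of that poll-until-terminal loop, not oslo.vmware's implementation; fetch_task_state is a hypothetical stand-in for whatever vCenter property query returns the task's state and progress.

```python
import time
from typing import Callable

# Hypothetical callable: returns (state, progress) for a vCenter task id,
# e.g. ("running", 14), ("success", 100) or ("error", None).
FetchTaskState = Callable[[str], tuple[str, int | None]]


def wait_for_task(task_id: str, fetch_task_state: FetchTaskState,
                  poll_interval: float = 0.5, timeout: float = 300.0) -> None:
    """Poll a task until it reaches a terminal state, mirroring the
    'progress is N%' ... 'completed successfully' lines in the log."""
    deadline = time.monotonic() + timeout
    while True:
        state, progress = fetch_task_state(task_id)
        if state == "success":
            print(f"Task {task_id} completed successfully.")
            return
        if state == "error":
            raise RuntimeError(f"Task {task_id} failed")
        print(f"Task {task_id} progress is {progress}%.")
        if time.monotonic() > deadline:
            raise TimeoutError(f"Task {task_id} did not finish in {timeout}s")
        time.sleep(poll_interval)
```

In the log the same loop shows up as repeated "Task: {'id': task-..., 'name': ...} progress is N%" entries from _poll_task, followed by a final entry carrying the task's duration_secs once it completes.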
[ 1929.098755] env[62510]: DEBUG nova.compute.manager [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1929.099622] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4a8ab66-b266-4f5d-b30e-612d6ec4e32c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.184819] env[62510]: DEBUG nova.scheduler.client.report [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1929.263872] env[62510]: DEBUG oslo_vmware.api [None req-5ce766e8-7177-4778-adbb-844926a60cd9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769568, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.272146] env[62510]: INFO nova.compute.manager [-] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Took 1.47 seconds to deallocate network for instance. [ 1929.297507] env[62510]: DEBUG nova.compute.manager [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1929.328326] env[62510]: DEBUG nova.virt.hardware [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1929.328595] env[62510]: DEBUG nova.virt.hardware [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1929.328771] env[62510]: DEBUG nova.virt.hardware [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1929.328928] env[62510]: DEBUG nova.virt.hardware [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1929.329129] env[62510]: DEBUG nova.virt.hardware [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1929.329365] env[62510]: DEBUG nova.virt.hardware [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1929.329601] env[62510]: DEBUG nova.virt.hardware [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1929.329766] env[62510]: DEBUG nova.virt.hardware [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1929.329937] env[62510]: DEBUG 
nova.virt.hardware [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1929.330117] env[62510]: DEBUG nova.virt.hardware [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1929.330291] env[62510]: DEBUG nova.virt.hardware [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1929.331572] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59adb00a-8d4c-44e1-a3fe-9624e1a67f13 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.344156] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1338c282-e787-4781-83bc-f5cb663967ce {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.442257] env[62510]: DEBUG nova.network.neutron [req-7d1ba5f5-354b-4cf8-9cd4-12d2405676c7 req-c884eca9-cd4c-4a4f-8ec5-24136eaf0a26 service nova] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Updated VIF entry in instance network info cache for port 98a986f8-1515-4f07-aee2-94ce84796db0. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1929.442694] env[62510]: DEBUG nova.network.neutron [req-7d1ba5f5-354b-4cf8-9cd4-12d2405676c7 req-c884eca9-cd4c-4a4f-8ec5-24136eaf0a26 service nova] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Updating instance_info_cache with network_info: [{"id": "348ebdec-3667-4eea-b76e-5356163db2f9", "address": "fa:16:3e:eb:0c:99", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.135", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap348ebdec-36", "ovs_interfaceid": "348ebdec-3667-4eea-b76e-5356163db2f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "98a986f8-1515-4f07-aee2-94ce84796db0", "address": "fa:16:3e:c5:ae:21", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98a986f8-15", "ovs_interfaceid": "98a986f8-1515-4f07-aee2-94ce84796db0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1929.464354] env[62510]: DEBUG oslo_concurrency.lockutils [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Releasing lock "refresh_cache-a5a9c086-6ae2-4644-acfa-7c147593b8d2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1929.464864] env[62510]: DEBUG nova.compute.manager [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Instance network_info: |[{"id": 
"54c12d32-3e2e-4ec3-a6a0-de7c5219efcc", "address": "fa:16:3e:9f:80:f5", "network": {"id": "dfd8964c-0225-4df4-815d-ef7af9be1790", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2123884413-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "fa6ed026a1264d02abe75467127bae99", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fd77ecbc-aaaf-45f4-ae8f-977d90e4052f", "external-id": "nsx-vlan-transportzone-171", "segmentation_id": 171, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54c12d32-3e", "ovs_interfaceid": "54c12d32-3e2e-4ec3-a6a0-de7c5219efcc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1929.466016] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9f:80:f5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fd77ecbc-aaaf-45f4-ae8f-977d90e4052f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '54c12d32-3e2e-4ec3-a6a0-de7c5219efcc', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1929.476613] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Creating folder: Project (fa6ed026a1264d02abe75467127bae99). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1929.477504] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b3574009-1ca1-440c-8bf2-ae400f0411aa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.494452] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Created folder: Project (fa6ed026a1264d02abe75467127bae99) in parent group-v367197. [ 1929.494659] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Creating folder: Instances. Parent ref: group-v367488. 
{{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1929.495317] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-62cf9a50-f3db-4a33-a81f-bc343cab556f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.502409] env[62510]: DEBUG oslo_vmware.api [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769565, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.505869] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Created folder: Instances in parent group-v367488. [ 1929.506184] env[62510]: DEBUG oslo.service.loopingcall [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1929.506344] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1929.506520] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cf0b5c66-f056-4da6-aad7-da0e31c044b8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.528846] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1929.528846] env[62510]: value = "task-1769571" [ 1929.528846] env[62510]: _type = "Task" [ 1929.528846] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1929.539019] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769571, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.581585] env[62510]: DEBUG oslo_vmware.api [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769561, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.670188} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1929.581864] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/da7c8e66-0047-4492-9c76-db7e729079e0/da7c8e66-0047-4492-9c76-db7e729079e0.vmdk to [datastore1] cf4160a8-1160-45fc-b9e5-e9526b6c1506/cf4160a8-1160-45fc-b9e5-e9526b6c1506.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1929.582800] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01dc0da6-dfd2-4d41-a56e-be0397e78734 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.607110] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] cf4160a8-1160-45fc-b9e5-e9526b6c1506/cf4160a8-1160-45fc-b9e5-e9526b6c1506.vmdk or device None with type streamOptimized {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1929.607506] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-08ad4684-4065-4995-8058-5846c94c8db8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.632514] env[62510]: INFO nova.compute.manager [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Took 26.95 seconds to build instance. [ 1929.633388] env[62510]: DEBUG oslo_vmware.api [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1929.633388] env[62510]: value = "task-1769572" [ 1929.633388] env[62510]: _type = "Task" [ 1929.633388] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1929.646231] env[62510]: DEBUG oslo_vmware.api [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769572, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.690323] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.423s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1929.690920] env[62510]: DEBUG nova.compute.manager [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1929.693625] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 2.340s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1929.763992] env[62510]: DEBUG oslo_vmware.api [None req-5ce766e8-7177-4778-adbb-844926a60cd9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769568, 'name': ReconfigVM_Task, 'duration_secs': 0.951944} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1929.764635] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5ce766e8-7177-4778-adbb-844926a60cd9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Releasing lock "72f8492b-304a-4451-ab40-4cdfe36b9e19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1929.764916] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5ce766e8-7177-4778-adbb-844926a60cd9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Reconfigured VM to attach interface {{(pid=62510) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1929.779313] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d3c9912f-9a21-4398-9c64-97b5b80ef9a0 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1929.816647] env[62510]: DEBUG nova.compute.manager [req-4d1c2ba9-0ffb-4f7e-ad51-2796e5507fd0 req-53a4bc1b-47ca-4921-8890-7eb50a5f932c service nova] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Received event network-changed-54c12d32-3e2e-4ec3-a6a0-de7c5219efcc {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1929.816843] env[62510]: DEBUG nova.compute.manager [req-4d1c2ba9-0ffb-4f7e-ad51-2796e5507fd0 req-53a4bc1b-47ca-4921-8890-7eb50a5f932c service nova] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Refreshing instance network info cache due to event network-changed-54c12d32-3e2e-4ec3-a6a0-de7c5219efcc. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1929.817283] env[62510]: DEBUG oslo_concurrency.lockutils [req-4d1c2ba9-0ffb-4f7e-ad51-2796e5507fd0 req-53a4bc1b-47ca-4921-8890-7eb50a5f932c service nova] Acquiring lock "refresh_cache-a5a9c086-6ae2-4644-acfa-7c147593b8d2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1929.817457] env[62510]: DEBUG oslo_concurrency.lockutils [req-4d1c2ba9-0ffb-4f7e-ad51-2796e5507fd0 req-53a4bc1b-47ca-4921-8890-7eb50a5f932c service nova] Acquired lock "refresh_cache-a5a9c086-6ae2-4644-acfa-7c147593b8d2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1929.818128] env[62510]: DEBUG nova.network.neutron [req-4d1c2ba9-0ffb-4f7e-ad51-2796e5507fd0 req-53a4bc1b-47ca-4921-8890-7eb50a5f932c service nova] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Refreshing network info cache for port 54c12d32-3e2e-4ec3-a6a0-de7c5219efcc {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1929.873427] env[62510]: DEBUG nova.network.neutron [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Successfully updated port: 405ea0bb-7824-446f-8b19-9d455a30b449 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1929.945855] env[62510]: DEBUG oslo_concurrency.lockutils [req-7d1ba5f5-354b-4cf8-9cd4-12d2405676c7 req-c884eca9-cd4c-4a4f-8ec5-24136eaf0a26 service nova] Releasing lock "refresh_cache-72f8492b-304a-4451-ab40-4cdfe36b9e19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1929.946219] env[62510]: DEBUG nova.compute.manager [req-7d1ba5f5-354b-4cf8-9cd4-12d2405676c7 req-c884eca9-cd4c-4a4f-8ec5-24136eaf0a26 service nova] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Received event network-vif-deleted-32ef9085-593e-45e8-b2f2-1200d914b69b {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1929.946441] env[62510]: INFO nova.compute.manager [req-7d1ba5f5-354b-4cf8-9cd4-12d2405676c7 req-c884eca9-cd4c-4a4f-8ec5-24136eaf0a26 service nova] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Neutron deleted interface 32ef9085-593e-45e8-b2f2-1200d914b69b; detaching it from the instance and deleting it from the info cache [ 1929.946651] env[62510]: DEBUG nova.network.neutron [req-7d1ba5f5-354b-4cf8-9cd4-12d2405676c7 req-c884eca9-cd4c-4a4f-8ec5-24136eaf0a26 service nova] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1930.002962] env[62510]: DEBUG oslo_vmware.api [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769565, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.053609} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1930.003289] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 11490e72-b9a5-4e8e-86c4-300c594cd914/11490e72-b9a5-4e8e-86c4-300c594cd914.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1930.003838] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1930.004114] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-33c58f3a-53db-4c41-97bb-94bb5c325812 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.012408] env[62510]: DEBUG oslo_vmware.api [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 1930.012408] env[62510]: value = "task-1769573" [ 1930.012408] env[62510]: _type = "Task" [ 1930.012408] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1930.020579] env[62510]: DEBUG oslo_vmware.api [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769573, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.037744] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769571, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.134570] env[62510]: DEBUG oslo_concurrency.lockutils [None req-05eb2003-dda3-4a6a-8b98-8588932b89f5 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "14f08e1c-bf2a-4dca-9770-8ceb311130e3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.466s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1930.134956] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "14f08e1c-bf2a-4dca-9770-8ceb311130e3" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 10.247s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1930.135168] env[62510]: INFO nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] During sync_power_state the instance has a pending task (block_device_mapping). Skip. 
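The lockutils entries in this stretch ('acquired ... waited 10.247s', '"released" ... held 28.466s', lock "compute_resources") are oslo.concurrency's own DEBUG logging around named locks. As a minimal sketch, assuming an in-process (non-external) lock, this is roughly how a caller serializes on such a named lock with the real lockutils API; update_usage_serialized and update_fn are illustrative names, and the waited/held timings in the log are emitted by lockutils itself, the prints below only mirror them.

```python
import time

from oslo_concurrency import lockutils


def update_usage_serialized(update_fn) -> None:
    """Run update_fn while holding the named in-process lock
    "compute_resources", the same lock name seen in the log.
    The decorator form @lockutils.synchronized("compute_resources")
    is the equivalent shorthand."""
    requested = time.monotonic()
    with lockutils.lock("compute_resources"):
        waited = time.monotonic() - requested
        start = time.monotonic()
        update_fn()
        held = time.monotonic() - start
    print(f'Lock "compute_resources": waited {waited:.3f}s, held {held:.3f}s')
```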
[ 1930.135344] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "14f08e1c-bf2a-4dca-9770-8ceb311130e3" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1930.144766] env[62510]: DEBUG oslo_vmware.api [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769572, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.197482] env[62510]: DEBUG nova.compute.utils [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1930.206910] env[62510]: DEBUG nova.compute.manager [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1930.206910] env[62510]: DEBUG nova.network.neutron [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1930.244964] env[62510]: DEBUG nova.policy [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '80f05c3e00b84277b4401aa98a253692', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bae4f0adee8c4c28add1849316448538', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1930.270285] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5ce766e8-7177-4778-adbb-844926a60cd9 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "interface-72f8492b-304a-4451-ab40-4cdfe36b9e19-98a986f8-1515-4f07-aee2-94ce84796db0" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.878s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1930.379696] env[62510]: DEBUG oslo_concurrency.lockutils [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "refresh_cache-fe3b3380-69bb-4563-abf2-9f0db439d31a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1930.379852] env[62510]: DEBUG oslo_concurrency.lockutils [None 
req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquired lock "refresh_cache-fe3b3380-69bb-4563-abf2-9f0db439d31a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1930.379998] env[62510]: DEBUG nova.network.neutron [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1930.449337] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9bccbea3-98bd-4a0d-98fd-f78911b394bd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.461603] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a533c021-c253-455e-bd14-b39d54c6a0dd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.502130] env[62510]: DEBUG nova.compute.manager [req-7d1ba5f5-354b-4cf8-9cd4-12d2405676c7 req-c884eca9-cd4c-4a4f-8ec5-24136eaf0a26 service nova] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Detach interface failed, port_id=32ef9085-593e-45e8-b2f2-1200d914b69b, reason: Instance e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c could not be found. {{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1930.503412] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24bc6971-411f-4905-aec6-7279c3f23ef8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.512976] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-7291a163-5b82-48c5-8323-7f8693db39ab tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Suspending the VM {{(pid=62510) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1930.516031] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-11c216eb-073e-41b1-8526-6fe42cc0c42a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.522964] env[62510]: DEBUG oslo_vmware.api [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769573, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.524244] env[62510]: DEBUG oslo_vmware.api [None req-7291a163-5b82-48c5-8323-7f8693db39ab tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1930.524244] env[62510]: value = "task-1769574" [ 1930.524244] env[62510]: _type = "Task" [ 1930.524244] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1930.539403] env[62510]: DEBUG oslo_vmware.api [None req-7291a163-5b82-48c5-8323-7f8693db39ab tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769574, 'name': SuspendVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.543683] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769571, 'name': CreateVM_Task, 'duration_secs': 0.699717} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1930.543683] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1930.543683] env[62510]: DEBUG oslo_concurrency.lockutils [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1930.543683] env[62510]: DEBUG oslo_concurrency.lockutils [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1930.543933] env[62510]: DEBUG oslo_concurrency.lockutils [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1930.544127] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3590acd3-80ce-4642-83c6-4a6a6f76a909 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.549696] env[62510]: DEBUG oslo_vmware.api [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for the task: (returnval){ [ 1930.549696] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]521fe872-c4b3-6886-5ec9-dd24a17ccc83" [ 1930.549696] env[62510]: _type = "Task" [ 1930.549696] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1930.558363] env[62510]: DEBUG oslo_vmware.api [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]521fe872-c4b3-6886-5ec9-dd24a17ccc83, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.649461] env[62510]: DEBUG oslo_vmware.api [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769572, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.656684] env[62510]: DEBUG nova.network.neutron [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Successfully created port: cc8e6d9b-23a8-4a82-bce2-858b46a9cf25 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1930.673476] env[62510]: DEBUG nova.network.neutron [req-4d1c2ba9-0ffb-4f7e-ad51-2796e5507fd0 req-53a4bc1b-47ca-4921-8890-7eb50a5f932c service nova] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Updated VIF entry in instance network info cache for port 54c12d32-3e2e-4ec3-a6a0-de7c5219efcc. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1930.674115] env[62510]: DEBUG nova.network.neutron [req-4d1c2ba9-0ffb-4f7e-ad51-2796e5507fd0 req-53a4bc1b-47ca-4921-8890-7eb50a5f932c service nova] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Updating instance_info_cache with network_info: [{"id": "54c12d32-3e2e-4ec3-a6a0-de7c5219efcc", "address": "fa:16:3e:9f:80:f5", "network": {"id": "dfd8964c-0225-4df4-815d-ef7af9be1790", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2123884413-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "fa6ed026a1264d02abe75467127bae99", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fd77ecbc-aaaf-45f4-ae8f-977d90e4052f", "external-id": "nsx-vlan-transportzone-171", "segmentation_id": 171, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54c12d32-3e", "ovs_interfaceid": "54c12d32-3e2e-4ec3-a6a0-de7c5219efcc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1930.710027] env[62510]: DEBUG nova.compute.manager [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1930.740708] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 4e735bb6-f167-4c2b-b44e-d2dd3040603d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1930.741099] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1930.741382] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 2f7b02e8-f658-448f-b6e6-9bfa94c74da4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1930.743046] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 8a230335-6388-45fb-a29e-9e63ddb4d5f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1930.743046] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 5f229f78-6c5d-4170-bdd4-c5522b137949 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1930.743046] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1930.743046] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 92cb4e54-a00e-4974-b134-22d302932e32 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1930.743046] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1930.743046] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 72f8492b-304a-4451-ab40-4cdfe36b9e19 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1930.743046] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 9956e5d2-edda-47af-a3df-743ebed1154b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1930.743046] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance cf4160a8-1160-45fc-b9e5-e9526b6c1506 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1930.743046] env[62510]: WARNING nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1930.743046] env[62510]: WARNING nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1930.743046] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 14f08e1c-bf2a-4dca-9770-8ceb311130e3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1930.743046] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 11490e72-b9a5-4e8e-86c4-300c594cd914 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1930.743046] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance a5a9c086-6ae2-4644-acfa-7c147593b8d2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1930.743614] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance fe3b3380-69bb-4563-abf2-9f0db439d31a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1930.743835] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 22002fc1-647e-4e65-a5f0-c3a34575985f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1930.954865] env[62510]: DEBUG nova.network.neutron [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1931.025985] env[62510]: DEBUG oslo_vmware.api [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769573, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.840464} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1931.029183] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1931.030022] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64ceb7ed-3f70-4da0-9bd8-394a26fa21e1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.039419] env[62510]: DEBUG oslo_vmware.api [None req-7291a163-5b82-48c5-8323-7f8693db39ab tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769574, 'name': SuspendVM_Task} progress is 45%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1931.058281] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] 11490e72-b9a5-4e8e-86c4-300c594cd914/11490e72-b9a5-4e8e-86c4-300c594cd914.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1931.061437] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1f96c5a9-50fa-4545-91bd-edfb03a582d0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.088179] env[62510]: DEBUG oslo_vmware.api [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]521fe872-c4b3-6886-5ec9-dd24a17ccc83, 'name': SearchDatastore_Task, 'duration_secs': 0.022155} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1931.089440] env[62510]: DEBUG oslo_concurrency.lockutils [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1931.089692] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1931.089923] env[62510]: DEBUG oslo_concurrency.lockutils [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1931.090087] env[62510]: DEBUG oslo_concurrency.lockutils [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1931.090268] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1931.090584] env[62510]: DEBUG oslo_vmware.api [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 1931.090584] env[62510]: value = "task-1769575" [ 1931.090584] env[62510]: _type = "Task" [ 1931.090584] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1931.090774] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-670fcd05-ed1e-4e55-b755-e1b414a5f9ad {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.101847] env[62510]: DEBUG oslo_vmware.api [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769575, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1931.108421] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1931.108653] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1931.109736] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5a6c2b0-8c4a-4c44-b16b-39ee1211c35d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.116911] env[62510]: DEBUG oslo_vmware.api [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for the task: (returnval){ [ 1931.116911] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52cf83ab-e81a-7d1c-0a87-6c0dc61a1c83" [ 1931.116911] env[62510]: _type = "Task" [ 1931.116911] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1931.130277] env[62510]: DEBUG oslo_vmware.api [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52cf83ab-e81a-7d1c-0a87-6c0dc61a1c83, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1931.145226] env[62510]: DEBUG oslo_vmware.api [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769572, 'name': ReconfigVM_Task} progress is 14%. 
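Every vCenter operation in these records follows the same oslo.vmware cycle: the caller gets back a Task reference (the "Waiting for the task: (returnval){ value = "task-..." }" blocks), the session polls it and logs "progress is N%" from api.py:434, and completion is logged with duration_secs from api.py:444. A minimal sketch of that poll-until-terminal pattern, using a hypothetical get_task_info callable as a stand-in for reading vCenter TaskInfo rather than the real oslo.vmware session API:

    # Illustrative poll loop for the wait_for_task cycle seen in these records.
    # get_task_info is a hypothetical callable returning something like
    # {'state': 'running', 'progress': 45} or {'state': 'success', 'result': ...}.
    import time

    def wait_for_task(get_task_info, poll_interval=0.5):
        while True:
            info = get_task_info()
            if info['state'] == 'success':      # maps to "completed successfully"
                return info.get('result')
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            # queued/running: report progress and poll again
            print(f"progress is {info.get('progress', 0)}%")
            time.sleep(poll_interval)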
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1931.162766] env[62510]: DEBUG nova.network.neutron [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Updating instance_info_cache with network_info: [{"id": "405ea0bb-7824-446f-8b19-9d455a30b449", "address": "fa:16:3e:7f:82:39", "network": {"id": "9b209a99-520e-436f-be97-fe37ae505518", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1482163995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86abf24d608d4c438161dc0b8335dea1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap405ea0bb-78", "ovs_interfaceid": "405ea0bb-7824-446f-8b19-9d455a30b449", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1931.178551] env[62510]: DEBUG oslo_concurrency.lockutils [req-4d1c2ba9-0ffb-4f7e-ad51-2796e5507fd0 req-53a4bc1b-47ca-4921-8890-7eb50a5f932c service nova] Releasing lock "refresh_cache-a5a9c086-6ae2-4644-acfa-7c147593b8d2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1931.178551] env[62510]: DEBUG nova.compute.manager [req-4d1c2ba9-0ffb-4f7e-ad51-2796e5507fd0 req-53a4bc1b-47ca-4921-8890-7eb50a5f932c service nova] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Received event network-vif-deleted-b0bc3c6a-5184-45d0-ab4b-e1a7ae6762b8 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1931.178551] env[62510]: DEBUG nova.compute.manager [req-4d1c2ba9-0ffb-4f7e-ad51-2796e5507fd0 req-53a4bc1b-47ca-4921-8890-7eb50a5f932c service nova] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Received event network-vif-plugged-405ea0bb-7824-446f-8b19-9d455a30b449 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1931.178800] env[62510]: DEBUG oslo_concurrency.lockutils [req-4d1c2ba9-0ffb-4f7e-ad51-2796e5507fd0 req-53a4bc1b-47ca-4921-8890-7eb50a5f932c service nova] Acquiring lock "fe3b3380-69bb-4563-abf2-9f0db439d31a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1931.178847] env[62510]: DEBUG oslo_concurrency.lockutils [req-4d1c2ba9-0ffb-4f7e-ad51-2796e5507fd0 req-53a4bc1b-47ca-4921-8890-7eb50a5f932c service nova] Lock "fe3b3380-69bb-4563-abf2-9f0db439d31a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1931.179055] 
env[62510]: DEBUG oslo_concurrency.lockutils [req-4d1c2ba9-0ffb-4f7e-ad51-2796e5507fd0 req-53a4bc1b-47ca-4921-8890-7eb50a5f932c service nova] Lock "fe3b3380-69bb-4563-abf2-9f0db439d31a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1931.179151] env[62510]: DEBUG nova.compute.manager [req-4d1c2ba9-0ffb-4f7e-ad51-2796e5507fd0 req-53a4bc1b-47ca-4921-8890-7eb50a5f932c service nova] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] No waiting events found dispatching network-vif-plugged-405ea0bb-7824-446f-8b19-9d455a30b449 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1931.179325] env[62510]: WARNING nova.compute.manager [req-4d1c2ba9-0ffb-4f7e-ad51-2796e5507fd0 req-53a4bc1b-47ca-4921-8890-7eb50a5f932c service nova] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Received unexpected event network-vif-plugged-405ea0bb-7824-446f-8b19-9d455a30b449 for instance with vm_state building and task_state spawning. [ 1931.250892] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 7b2bcec4-6df7-4591-ac02-9da04d185756 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1931.250892] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Total usable vcpus: 48, total allocated vcpus: 16 {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1931.250892] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3584MB phys_disk=200GB used_disk=16GB total_vcpus=48 used_vcpus=16 pci_stats=[] {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1931.506501] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3dd7165-4193-4b02-8a3c-64b26864b323 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.514831] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88d56d43-d109-4fbe-95a2-a75f476e715f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.550124] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cabb172f-c5db-4e11-b5ec-3b08ba5c5df0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.559543] env[62510]: DEBUG oslo_vmware.api [None req-7291a163-5b82-48c5-8323-7f8693db39ab tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769574, 'name': SuspendVM_Task, 'duration_secs': 0.77992} completed successfully. 
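The req-4d1c2ba9 records above show the external-event path: the neutron notification for network-vif-plugged-405ea0bb takes the per-instance "fe3b3380-...-events" lock, tries to pop a registered waiter, finds none ("No waiting events found"), and the manager logs the "Received unexpected event" warning because the instance is still building. A simplified, in-memory sketch of that registry pattern, not Nova's actual InstanceEvents class:

    # Toy per-instance event registry suggested by the "<uuid>-events" lock lines.
    import threading
    from collections import defaultdict

    class InstanceEvents:
        def __init__(self):
            self._events = defaultdict(dict)            # uuid -> {event_name: waiter}
            self._locks = defaultdict(threading.Lock)   # one lock per instance

        def prepare(self, uuid, event_name, waiter):
            with self._locks[uuid]:
                self._events[uuid][event_name] = waiter

        def pop_instance_event(self, uuid, event_name):
            with self._locks[uuid]:
                return self._events[uuid].pop(event_name, None)

    events = InstanceEvents()
    waiter = events.pop_instance_event(
        "fe3b3380-69bb-4563-abf2-9f0db439d31a",
        "network-vif-plugged-405ea0bb-7824-446f-8b19-9d455a30b449")
    if waiter is None:
        print("WARNING: received unexpected event")     # mirrors the warning above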
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1931.561200] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83c2ed95-51d8-4ebb-8fc8-f53a5a0f4911 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.564952] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-7291a163-5b82-48c5-8323-7f8693db39ab tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Suspended the VM {{(pid=62510) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1931.565249] env[62510]: DEBUG nova.compute.manager [None req-7291a163-5b82-48c5-8323-7f8693db39ab tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1931.566102] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ed11e48-f9d2-457c-8812-f8e723e8876f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.581979] env[62510]: DEBUG nova.compute.provider_tree [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1931.602553] env[62510]: DEBUG oslo_vmware.api [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769575, 'name': ReconfigVM_Task, 'duration_secs': 0.324289} completed successfully. 
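The inventory the resource tracker pushes here (VCPU total 48 at allocation_ratio 4.0, MEMORY_MB total 196590 with 512 reserved, DISK_GB total 400) is what bounds the scheduler: Placement's usable capacity per resource class is (total - reserved) * allocation_ratio. A quick check of the numbers in these records:

    # Placement capacity per resource class: (total - reserved) * allocation_ratio.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        print(rc, (inv['total'] - inv['reserved']) * inv['allocation_ratio'])
    # VCPU 192.0, so the 16 allocated vCPUs in the final resource view are well
    # under the limit; MEMORY_MB 196078.0; DISK_GB 400.0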
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1931.602837] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Reconfigured VM instance instance-0000006f to attach disk [datastore1] 11490e72-b9a5-4e8e-86c4-300c594cd914/11490e72-b9a5-4e8e-86c4-300c594cd914.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1931.603477] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a84e3a89-dc78-4de5-8803-fcb2d68bc099 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.608417] env[62510]: DEBUG oslo_concurrency.lockutils [None req-83039c46-fe78-45b2-89b7-ea17ce900b0e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "interface-72f8492b-304a-4451-ab40-4cdfe36b9e19-98a986f8-1515-4f07-aee2-94ce84796db0" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1931.608639] env[62510]: DEBUG oslo_concurrency.lockutils [None req-83039c46-fe78-45b2-89b7-ea17ce900b0e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "interface-72f8492b-304a-4451-ab40-4cdfe36b9e19-98a986f8-1515-4f07-aee2-94ce84796db0" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1931.611016] env[62510]: DEBUG oslo_vmware.api [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 1931.611016] env[62510]: value = "task-1769576" [ 1931.611016] env[62510]: _type = "Task" [ 1931.611016] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1931.619803] env[62510]: DEBUG oslo_vmware.api [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769576, 'name': Rename_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1931.628910] env[62510]: DEBUG oslo_vmware.api [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52cf83ab-e81a-7d1c-0a87-6c0dc61a1c83, 'name': SearchDatastore_Task, 'duration_secs': 0.011902} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1931.629900] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2aca8b9d-b8f2-4e20-8c4e-f1662bc156c2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.636351] env[62510]: DEBUG oslo_vmware.api [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for the task: (returnval){ [ 1931.636351] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5279a2ab-3ecf-1742-c219-c0a9ddd81f57" [ 1931.636351] env[62510]: _type = "Task" [ 1931.636351] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1931.647829] env[62510]: DEBUG oslo_vmware.api [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769572, 'name': ReconfigVM_Task, 'duration_secs': 1.608548} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1931.651260] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Reconfigured VM instance instance-0000005c to attach disk [datastore1] cf4160a8-1160-45fc-b9e5-e9526b6c1506/cf4160a8-1160-45fc-b9e5-e9526b6c1506.vmdk or device None with type streamOptimized {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1931.651941] env[62510]: DEBUG oslo_vmware.api [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5279a2ab-3ecf-1742-c219-c0a9ddd81f57, 'name': SearchDatastore_Task, 'duration_secs': 0.010369} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1931.652184] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-785f5826-d4a5-4685-a18f-7b30e71485c8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.653837] env[62510]: DEBUG oslo_concurrency.lockutils [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1931.654126] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] a5a9c086-6ae2-4644-acfa-7c147593b8d2/a5a9c086-6ae2-4644-acfa-7c147593b8d2.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1931.654451] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-63ef4388-8e6d-4ec0-82e1-6d8d3844bfd4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.662233] env[62510]: DEBUG oslo_vmware.api [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1931.662233] env[62510]: value = "task-1769577" [ 1931.662233] env[62510]: _type = "Task" [ 1931.662233] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1931.663100] env[62510]: DEBUG oslo_vmware.api [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for the task: (returnval){ [ 1931.663100] env[62510]: value = "task-1769578" [ 1931.663100] env[62510]: _type = "Task" [ 1931.663100] env[62510]: } to complete. 
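The req-be614538 records above bracket the image-cache work in a named oslo.concurrency lock: the lock on "[datastore1] devstack-image-cache_base/645af513-.../645af513-....vmdk" is acquired before the cached image is checked and released once the VMDK can be copied out for the instance, so only one request populates a cache entry at a time. A minimal sketch of that serialize-the-cache-miss idea, using a toy in-memory cache and a placeholder downloader rather than Nova's _fetch_image_if_missing:

    # Serializing image-cache population under a named lock, as the
    # Acquiring/Acquired/Releasing lock messages above suggest.
    from oslo_concurrency import lockutils

    _cache = set()   # pretend datastore image cache

    def fetch_image_if_missing(datastore, image_id, download):
        vmdk = f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
        with lockutils.lock(vmdk):      # only one request populates the cache entry
            if vmdk not in _cache:
                download(vmdk)          # placeholder for the actual image fetch
                _cache.add(vmdk)
        return vmdk

    print(fetch_image_if_missing("datastore1",
                                 "645af513-c243-4722-b631-714f21477ae6",
                                 lambda path: None))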
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1931.666078] env[62510]: DEBUG oslo_concurrency.lockutils [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Releasing lock "refresh_cache-fe3b3380-69bb-4563-abf2-9f0db439d31a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1931.666442] env[62510]: DEBUG nova.compute.manager [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Instance network_info: |[{"id": "405ea0bb-7824-446f-8b19-9d455a30b449", "address": "fa:16:3e:7f:82:39", "network": {"id": "9b209a99-520e-436f-be97-fe37ae505518", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1482163995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86abf24d608d4c438161dc0b8335dea1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap405ea0bb-78", "ovs_interfaceid": "405ea0bb-7824-446f-8b19-9d455a30b449", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1931.669543] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7f:82:39', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9630cae2-7dd9-42b7-8b53-91ab254af243', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '405ea0bb-7824-446f-8b19-9d455a30b449', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1931.677322] env[62510]: DEBUG oslo.service.loopingcall [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
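The network_info blob repeated in these records pins down the instance's L3 details: network 9b209a99 is a 192.168.128.0/28 with the gateway at .1, the DHCP server at .2 and the instance's fixed IP at .12, MTU 8950 on an NSX transport zone. A small standard-library sanity check of those addresses:

    # Verify the addresses in the cached network_info all live inside the /28.
    import ipaddress

    subnet = ipaddress.ip_network("192.168.128.0/28")
    for role, addr in [("gateway", "192.168.128.1"),
                       ("dhcp_server", "192.168.128.2"),
                       ("fixed_ip", "192.168.128.12")]:
        assert ipaddress.ip_address(addr) in subnet, role
    print(subnet.num_addresses)   # 16 addresses, .0 through .15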
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1931.677884] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1931.678456] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2aed1615-0ae0-426f-88cf-5b9b4f03b90b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.699195] env[62510]: DEBUG oslo_vmware.api [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769577, 'name': Rename_Task} progress is 10%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1931.699472] env[62510]: DEBUG oslo_vmware.api [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769578, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1931.704874] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1931.704874] env[62510]: value = "task-1769579" [ 1931.704874] env[62510]: _type = "Task" [ 1931.704874] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1931.712383] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769579, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1931.723152] env[62510]: DEBUG nova.compute.manager [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1931.752594] env[62510]: DEBUG nova.virt.hardware [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1931.752868] env[62510]: DEBUG nova.virt.hardware [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1931.753057] env[62510]: DEBUG nova.virt.hardware [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1931.753274] env[62510]: DEBUG nova.virt.hardware [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1931.753428] env[62510]: DEBUG nova.virt.hardware [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1931.753597] env[62510]: DEBUG nova.virt.hardware [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1931.753827] env[62510]: DEBUG nova.virt.hardware [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1931.753995] env[62510]: DEBUG nova.virt.hardware [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1931.754204] env[62510]: DEBUG 
nova.virt.hardware [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1931.754466] env[62510]: DEBUG nova.virt.hardware [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1931.754693] env[62510]: DEBUG nova.virt.hardware [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1931.755578] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3b5395c-e9bf-452f-9edd-e2744271bbf2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.765225] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93e2c571-cb40-45d6-8698-f1b2d3cd0e5e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.841247] env[62510]: DEBUG nova.compute.manager [req-48be58e1-ef87-49dc-a743-b24faf9472b5 req-0e543beb-4596-487d-8fc4-080ee657e68c service nova] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Received event network-changed-405ea0bb-7824-446f-8b19-9d455a30b449 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1931.841436] env[62510]: DEBUG nova.compute.manager [req-48be58e1-ef87-49dc-a743-b24faf9472b5 req-0e543beb-4596-487d-8fc4-080ee657e68c service nova] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Refreshing instance network info cache due to event network-changed-405ea0bb-7824-446f-8b19-9d455a30b449. 
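The nova.virt.hardware records above walk through topology selection for the m1.nano flavor: neither flavor nor image sets limits (0:0:0), so the maximums default to 65536 sockets/cores/threads, and for a single vCPU the only product that fits is 1:1:1, hence the lone VirtCPUTopology(cores=1,sockets=1,threads=1). A small sketch of that enumeration idea, not Nova's exact implementation:

    # Enumerate (sockets, cores, threads) whose product equals the vCPU count and
    # that respect the maximums: the idea behind "Got 1 possible topologies".
    def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536,
                                max_threads=65536):
        found = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        found.append((sockets, cores, threads))
        return found

    print(possible_cpu_topologies(1))   # [(1, 1, 1)], matching the log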
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1931.841674] env[62510]: DEBUG oslo_concurrency.lockutils [req-48be58e1-ef87-49dc-a743-b24faf9472b5 req-0e543beb-4596-487d-8fc4-080ee657e68c service nova] Acquiring lock "refresh_cache-fe3b3380-69bb-4563-abf2-9f0db439d31a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1931.841825] env[62510]: DEBUG oslo_concurrency.lockutils [req-48be58e1-ef87-49dc-a743-b24faf9472b5 req-0e543beb-4596-487d-8fc4-080ee657e68c service nova] Acquired lock "refresh_cache-fe3b3380-69bb-4563-abf2-9f0db439d31a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1931.841987] env[62510]: DEBUG nova.network.neutron [req-48be58e1-ef87-49dc-a743-b24faf9472b5 req-0e543beb-4596-487d-8fc4-080ee657e68c service nova] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Refreshing network info cache for port 405ea0bb-7824-446f-8b19-9d455a30b449 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1932.108723] env[62510]: ERROR nova.scheduler.client.report [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [req-fa5d5d15-3c2a-433a-8439-3d975aa5de95] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c3653102-341b-4ed1-8b1f-1abaf8aa3e56. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-fa5d5d15-3c2a-433a-8439-3d975aa5de95"}]} [ 1932.112220] env[62510]: DEBUG oslo_concurrency.lockutils [None req-83039c46-fe78-45b2-89b7-ea17ce900b0e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "72f8492b-304a-4451-ab40-4cdfe36b9e19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1932.112430] env[62510]: DEBUG oslo_concurrency.lockutils [None req-83039c46-fe78-45b2-89b7-ea17ce900b0e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquired lock "72f8492b-304a-4451-ab40-4cdfe36b9e19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1932.113433] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bf7ec72-4786-4685-bbd2-66aacee0ea7e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.142730] env[62510]: DEBUG nova.scheduler.client.report [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Refreshing inventories for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1932.145631] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28835baf-86d9-4e32-9d55-e78c302ff736 {{(pid=62510) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.148831] env[62510]: DEBUG oslo_vmware.api [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769576, 'name': Rename_Task, 'duration_secs': 0.138976} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1932.149508] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1932.150202] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fb08efca-496c-42e9-8120-0f0fb0b21124 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.178139] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-83039c46-fe78-45b2-89b7-ea17ce900b0e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Reconfiguring VM to detach interface {{(pid=62510) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1932.180810] env[62510]: DEBUG nova.scheduler.client.report [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Updating ProviderTree inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1932.181038] env[62510]: DEBUG nova.compute.provider_tree [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1932.189808] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-86842419-85cf-449b-83b5-4e2c6ba01c6e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.203574] env[62510]: DEBUG oslo_vmware.api [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 1932.203574] env[62510]: value = "task-1769580" [ 1932.203574] env[62510]: _type = "Task" [ 1932.203574] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1932.218257] env[62510]: DEBUG oslo_vmware.api [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769577, 'name': Rename_Task, 'duration_secs': 0.146488} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1932.218443] env[62510]: DEBUG oslo_vmware.api [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769578, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.542338} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1932.218656] env[62510]: DEBUG oslo_vmware.api [None req-83039c46-fe78-45b2-89b7-ea17ce900b0e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1932.218656] env[62510]: value = "task-1769581" [ 1932.218656] env[62510]: _type = "Task" [ 1932.218656] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1932.219448] env[62510]: DEBUG nova.scheduler.client.report [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Refreshing aggregate associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, aggregates: None {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1932.224954] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1932.225246] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] a5a9c086-6ae2-4644-acfa-7c147593b8d2/a5a9c086-6ae2-4644-acfa-7c147593b8d2.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1932.225246] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1932.225676] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ddebf77f-6ec4-4980-974b-fa8c7a371c0e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.227606] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6f71ac85-7bf5-4ad2-832e-8024f4e8d980 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.236046] env[62510]: DEBUG oslo_vmware.api 
[None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769580, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.241595] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769579, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.247187] env[62510]: DEBUG oslo_vmware.api [None req-83039c46-fe78-45b2-89b7-ea17ce900b0e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769581, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.247187] env[62510]: DEBUG oslo_vmware.api [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for the task: (returnval){ [ 1932.247187] env[62510]: value = "task-1769583" [ 1932.247187] env[62510]: _type = "Task" [ 1932.247187] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1932.247187] env[62510]: DEBUG oslo_vmware.api [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1932.247187] env[62510]: value = "task-1769582" [ 1932.247187] env[62510]: _type = "Task" [ 1932.247187] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1932.248145] env[62510]: DEBUG nova.scheduler.client.report [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Refreshing trait associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1932.264896] env[62510]: DEBUG oslo_vmware.api [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769583, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.267977] env[62510]: DEBUG oslo_vmware.api [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769582, 'name': PowerOnVM_Task} progress is 33%. 
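The "Extending root virtual disk to 1048576" step a few records back is the flavor's root_gb expressed in KiB, the unit the ExtendVirtualDisk task works in: m1.nano has root_gb=1, so the VMDK copied from the cache is grown to present the full 1 GiB root disk before the VM powers on. The arithmetic:

    # root_gb converted to KiB, the capacity unit used by the extend task.
    root_gb = 1                      # m1.nano flavor in these records
    print(root_gb * 1024 * 1024)     # 1048576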
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.336419] env[62510]: DEBUG nova.network.neutron [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Successfully updated port: cc8e6d9b-23a8-4a82-bce2-858b46a9cf25 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1932.537048] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0b62e3c-32fd-4d50-8fef-9e9b62a8e84b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.545919] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e35df1d6-3a27-49cc-a949-ac40378e3461 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.578557] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e731801a-5542-413d-9f78-fce300d3c95f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.589047] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-813dbdbd-0b7b-42d5-81c9-d3f7e33c5eb8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.607671] env[62510]: DEBUG nova.compute.provider_tree [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1932.623463] env[62510]: DEBUG nova.network.neutron [req-48be58e1-ef87-49dc-a743-b24faf9472b5 req-0e543beb-4596-487d-8fc4-080ee657e68c service nova] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Updated VIF entry in instance network info cache for port 405ea0bb-7824-446f-8b19-9d455a30b449. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1932.623822] env[62510]: DEBUG nova.network.neutron [req-48be58e1-ef87-49dc-a743-b24faf9472b5 req-0e543beb-4596-487d-8fc4-080ee657e68c service nova] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Updating instance_info_cache with network_info: [{"id": "405ea0bb-7824-446f-8b19-9d455a30b449", "address": "fa:16:3e:7f:82:39", "network": {"id": "9b209a99-520e-436f-be97-fe37ae505518", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1482163995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86abf24d608d4c438161dc0b8335dea1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap405ea0bb-78", "ovs_interfaceid": "405ea0bb-7824-446f-8b19-9d455a30b449", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1932.715989] env[62510]: DEBUG oslo_vmware.api [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769580, 'name': PowerOnVM_Task, 'duration_secs': 0.472173} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1932.716225] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1932.716430] env[62510]: INFO nova.compute.manager [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Took 10.21 seconds to spawn the instance on the hypervisor. [ 1932.716608] env[62510]: DEBUG nova.compute.manager [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1932.717436] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18844998-1c18-4691-9aeb-3590d6558662 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.734638] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769579, 'name': CreateVM_Task, 'duration_secs': 0.718734} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1932.735126] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1932.736111] env[62510]: DEBUG oslo_concurrency.lockutils [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1932.736243] env[62510]: DEBUG oslo_concurrency.lockutils [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1932.736585] env[62510]: DEBUG oslo_concurrency.lockutils [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1932.736827] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1393f1af-56f3-489b-a10c-94e8473c1f02 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.741612] env[62510]: DEBUG oslo_vmware.api [None req-83039c46-fe78-45b2-89b7-ea17ce900b0e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769581, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.744581] env[62510]: DEBUG oslo_vmware.api [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1932.744581] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52d6f3fa-40f9-e6ca-e229-3715b04af6bc" [ 1932.744581] env[62510]: _type = "Task" [ 1932.744581] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1932.752128] env[62510]: DEBUG oslo_vmware.api [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52d6f3fa-40f9-e6ca-e229-3715b04af6bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.767013] env[62510]: DEBUG oslo_vmware.api [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769583, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.130419} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1932.774547] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1932.775399] env[62510]: DEBUG oslo_vmware.api [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769582, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.776164] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d3c596b-1f09-4461-904b-7d03eba801a2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.801671] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] a5a9c086-6ae2-4644-acfa-7c147593b8d2/a5a9c086-6ae2-4644-acfa-7c147593b8d2.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1932.802376] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-022722cb-c5d2-4206-85a0-19a78b21fb02 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.822112] env[62510]: DEBUG oslo_vmware.api [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for the task: (returnval){ [ 1932.822112] env[62510]: value = "task-1769584" [ 1932.822112] env[62510]: _type = "Task" [ 1932.822112] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1932.830213] env[62510]: DEBUG oslo_vmware.api [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769584, 'name': ReconfigVM_Task} progress is 5%. 
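Interleaved as they are, the per-instance task names in this stretch recur in the same order once an image is cached: create the VM shell, copy the cached VMDK into the instance directory, extend it to the flavor's root size, attach it with a reconfigure, rename the VM, then power it on (CreateVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task, each polled to completion as above). A hypothetical outline of that ordering with placeholder step functions, not Nova's vmops/vm_util calls:

    # Each step corresponds to a vCenter task name appearing in this log;
    # `steps` is a placeholder object standing in for the driver helpers.
    def spawn_from_cached_image(steps, vm_name, cached_vmdk, instance_vmdk, root_kib):
        vm = steps.create_vm(vm_name)                          # CreateVM_Task
        steps.copy_virtual_disk(cached_vmdk, instance_vmdk)    # CopyVirtualDisk_Task
        steps.extend_virtual_disk(instance_vmdk, root_kib)     # ExtendVirtualDisk_Task
        steps.attach_disk_to_vm(vm, instance_vmdk)             # ReconfigVM_Task
        steps.rename(vm, vm_name)                              # Rename_Task
        steps.power_on(vm)                                     # PowerOnVM_Task
        return vm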
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.840842] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "refresh_cache-22002fc1-647e-4e65-a5f0-c3a34575985f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1932.840982] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquired lock "refresh_cache-22002fc1-647e-4e65-a5f0-c3a34575985f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1932.841148] env[62510]: DEBUG nova.network.neutron [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1933.010207] env[62510]: DEBUG oslo_concurrency.lockutils [None req-425f3ce9-db22-4f1f-9ecc-6704fb60a1b0 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "14f08e1c-bf2a-4dca-9770-8ceb311130e3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1933.010561] env[62510]: DEBUG oslo_concurrency.lockutils [None req-425f3ce9-db22-4f1f-9ecc-6704fb60a1b0 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "14f08e1c-bf2a-4dca-9770-8ceb311130e3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1933.011487] env[62510]: DEBUG oslo_concurrency.lockutils [None req-425f3ce9-db22-4f1f-9ecc-6704fb60a1b0 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "14f08e1c-bf2a-4dca-9770-8ceb311130e3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1933.011832] env[62510]: DEBUG oslo_concurrency.lockutils [None req-425f3ce9-db22-4f1f-9ecc-6704fb60a1b0 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "14f08e1c-bf2a-4dca-9770-8ceb311130e3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1933.012096] env[62510]: DEBUG oslo_concurrency.lockutils [None req-425f3ce9-db22-4f1f-9ecc-6704fb60a1b0 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "14f08e1c-bf2a-4dca-9770-8ceb311130e3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
1933.014460] env[62510]: INFO nova.compute.manager [None req-425f3ce9-db22-4f1f-9ecc-6704fb60a1b0 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Terminating instance [ 1933.126719] env[62510]: DEBUG oslo_concurrency.lockutils [req-48be58e1-ef87-49dc-a743-b24faf9472b5 req-0e543beb-4596-487d-8fc4-080ee657e68c service nova] Releasing lock "refresh_cache-fe3b3380-69bb-4563-abf2-9f0db439d31a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1933.140105] env[62510]: DEBUG nova.scheduler.client.report [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Updated inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with generation 153 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1933.140485] env[62510]: DEBUG nova.compute.provider_tree [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Updating resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 generation from 153 to 154 during operation: update_inventory {{(pid=62510) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1933.140752] env[62510]: DEBUG nova.compute.provider_tree [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1933.245660] env[62510]: DEBUG oslo_vmware.api [None req-83039c46-fe78-45b2-89b7-ea17ce900b0e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769581, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.246175] env[62510]: INFO nova.compute.manager [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Took 22.79 seconds to build instance. [ 1933.256870] env[62510]: DEBUG oslo_vmware.api [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52d6f3fa-40f9-e6ca-e229-3715b04af6bc, 'name': SearchDatastore_Task, 'duration_secs': 0.019808} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1933.260151] env[62510]: DEBUG oslo_concurrency.lockutils [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1933.260430] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1933.260701] env[62510]: DEBUG oslo_concurrency.lockutils [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1933.260875] env[62510]: DEBUG oslo_concurrency.lockutils [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1933.261097] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1933.261879] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ffe441d7-3399-4cfa-9bb4-3bfd1e27ebde {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.269360] env[62510]: DEBUG oslo_vmware.api [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769582, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.270495] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1933.270678] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1933.271419] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9350cfe3-708c-4611-928c-db9d3be90d20 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.276562] env[62510]: DEBUG oslo_vmware.api [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1933.276562] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52fe1c21-db0f-7f22-d18c-e88e0d4ee81d" [ 1933.276562] env[62510]: _type = "Task" [ 1933.276562] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1933.285319] env[62510]: DEBUG oslo_vmware.api [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52fe1c21-db0f-7f22-d18c-e88e0d4ee81d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.331893] env[62510]: DEBUG oslo_vmware.api [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769584, 'name': ReconfigVM_Task, 'duration_secs': 0.362983} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1933.332171] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Reconfigured VM instance instance-00000070 to attach disk [datastore1] a5a9c086-6ae2-4644-acfa-7c147593b8d2/a5a9c086-6ae2-4644-acfa-7c147593b8d2.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1933.332790] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5a676163-46f0-4011-9174-f0609a3b4de4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.339494] env[62510]: DEBUG oslo_vmware.api [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for the task: (returnval){ [ 1933.339494] env[62510]: value = "task-1769585" [ 1933.339494] env[62510]: _type = "Task" [ 1933.339494] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1933.347578] env[62510]: DEBUG oslo_vmware.api [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769585, 'name': Rename_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.375791] env[62510]: DEBUG nova.network.neutron [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1933.518842] env[62510]: DEBUG nova.compute.manager [None req-425f3ce9-db22-4f1f-9ecc-6704fb60a1b0 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1933.519055] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-425f3ce9-db22-4f1f-9ecc-6704fb60a1b0 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1933.520191] env[62510]: DEBUG nova.network.neutron [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Updating instance_info_cache with network_info: [{"id": "cc8e6d9b-23a8-4a82-bce2-858b46a9cf25", "address": "fa:16:3e:28:36:9e", "network": {"id": "4c55d05c-607e-4972-898f-4aacefeddfdb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1391357384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bae4f0adee8c4c28add1849316448538", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dced2f3d-7fd3-4a42-836d-9f02dab4c949", "external-id": "nsx-vlan-transportzone-117", "segmentation_id": 117, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc8e6d9b-23", "ovs_interfaceid": "cc8e6d9b-23a8-4a82-bce2-858b46a9cf25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1933.522478] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89896f5a-1773-4312-a332-f1ace8aa7829 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.530225] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-425f3ce9-db22-4f1f-9ecc-6704fb60a1b0 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1933.530489] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with 
opID=oslo.vmware-6eff6e81-dcfa-4eeb-8bdf-1a03e5fca0da {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.614232] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-425f3ce9-db22-4f1f-9ecc-6704fb60a1b0 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1933.615106] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-425f3ce9-db22-4f1f-9ecc-6704fb60a1b0 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1933.615106] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-425f3ce9-db22-4f1f-9ecc-6704fb60a1b0 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Deleting the datastore file [datastore1] 14f08e1c-bf2a-4dca-9770-8ceb311130e3 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1933.615106] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2c0b1901-f63c-4a6a-b9e8-2d3b00073cc8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.622193] env[62510]: DEBUG oslo_vmware.api [None req-425f3ce9-db22-4f1f-9ecc-6704fb60a1b0 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1933.622193] env[62510]: value = "task-1769587" [ 1933.622193] env[62510]: _type = "Task" [ 1933.622193] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1933.629562] env[62510]: DEBUG oslo_vmware.api [None req-425f3ce9-db22-4f1f-9ecc-6704fb60a1b0 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769587, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.646573] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62510) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1933.646778] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.953s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1933.647024] env[62510]: DEBUG oslo_concurrency.lockutils [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.104s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1933.648586] env[62510]: INFO nova.compute.claims [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1933.736762] env[62510]: DEBUG oslo_vmware.api [None req-83039c46-fe78-45b2-89b7-ea17ce900b0e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769581, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.751034] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e28ba7c9-fa88-486f-972d-5a5bd2706e46 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "11490e72-b9a5-4e8e-86c4-300c594cd914" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.297s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1933.751034] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "11490e72-b9a5-4e8e-86c4-300c594cd914" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 13.863s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1933.751832] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78991fba-5665-4150-a607-8bfbecd906d0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.767836] env[62510]: DEBUG oslo_vmware.api [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769582, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.786655] env[62510]: DEBUG oslo_vmware.api [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52fe1c21-db0f-7f22-d18c-e88e0d4ee81d, 'name': SearchDatastore_Task, 'duration_secs': 0.008831} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1933.787438] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-606457c8-f606-4394-8180-4450e712f5d5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.793658] env[62510]: DEBUG oslo_vmware.api [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1933.793658] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52bf3074-f180-661e-fc08-166c49ba6513" [ 1933.793658] env[62510]: _type = "Task" [ 1933.793658] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1933.802968] env[62510]: DEBUG oslo_vmware.api [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52bf3074-f180-661e-fc08-166c49ba6513, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.853414] env[62510]: DEBUG oslo_vmware.api [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769585, 'name': Rename_Task, 'duration_secs': 0.183348} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1933.853808] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1933.854155] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e0e6daf1-0f5f-4f57-a300-5b013018403d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.860997] env[62510]: DEBUG oslo_vmware.api [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for the task: (returnval){ [ 1933.860997] env[62510]: value = "task-1769588" [ 1933.860997] env[62510]: _type = "Task" [ 1933.860997] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1933.871258] env[62510]: DEBUG oslo_vmware.api [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769588, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.908244] env[62510]: DEBUG nova.compute.manager [req-5077a7ba-3d9e-4b35-8c39-ef08c650f01b req-03517b7d-61ab-47d2-955b-d24bbf736098 service nova] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Received event network-vif-plugged-cc8e6d9b-23a8-4a82-bce2-858b46a9cf25 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1933.908463] env[62510]: DEBUG oslo_concurrency.lockutils [req-5077a7ba-3d9e-4b35-8c39-ef08c650f01b req-03517b7d-61ab-47d2-955b-d24bbf736098 service nova] Acquiring lock "22002fc1-647e-4e65-a5f0-c3a34575985f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1933.908815] env[62510]: DEBUG oslo_concurrency.lockutils [req-5077a7ba-3d9e-4b35-8c39-ef08c650f01b req-03517b7d-61ab-47d2-955b-d24bbf736098 service nova] Lock "22002fc1-647e-4e65-a5f0-c3a34575985f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1933.909142] env[62510]: DEBUG oslo_concurrency.lockutils [req-5077a7ba-3d9e-4b35-8c39-ef08c650f01b req-03517b7d-61ab-47d2-955b-d24bbf736098 service nova] Lock "22002fc1-647e-4e65-a5f0-c3a34575985f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1933.909473] env[62510]: DEBUG nova.compute.manager [req-5077a7ba-3d9e-4b35-8c39-ef08c650f01b req-03517b7d-61ab-47d2-955b-d24bbf736098 service nova] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] No waiting events found dispatching network-vif-plugged-cc8e6d9b-23a8-4a82-bce2-858b46a9cf25 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1933.909766] env[62510]: WARNING nova.compute.manager [req-5077a7ba-3d9e-4b35-8c39-ef08c650f01b req-03517b7d-61ab-47d2-955b-d24bbf736098 service nova] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Received unexpected event network-vif-plugged-cc8e6d9b-23a8-4a82-bce2-858b46a9cf25 for instance with vm_state building and task_state spawning. [ 1933.910051] env[62510]: DEBUG nova.compute.manager [req-5077a7ba-3d9e-4b35-8c39-ef08c650f01b req-03517b7d-61ab-47d2-955b-d24bbf736098 service nova] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Received event network-changed-cc8e6d9b-23a8-4a82-bce2-858b46a9cf25 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1933.910312] env[62510]: DEBUG nova.compute.manager [req-5077a7ba-3d9e-4b35-8c39-ef08c650f01b req-03517b7d-61ab-47d2-955b-d24bbf736098 service nova] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Refreshing instance network info cache due to event network-changed-cc8e6d9b-23a8-4a82-bce2-858b46a9cf25. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1933.910580] env[62510]: DEBUG oslo_concurrency.lockutils [req-5077a7ba-3d9e-4b35-8c39-ef08c650f01b req-03517b7d-61ab-47d2-955b-d24bbf736098 service nova] Acquiring lock "refresh_cache-22002fc1-647e-4e65-a5f0-c3a34575985f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1934.026929] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Releasing lock "refresh_cache-22002fc1-647e-4e65-a5f0-c3a34575985f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1934.028296] env[62510]: DEBUG nova.compute.manager [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Instance network_info: |[{"id": "cc8e6d9b-23a8-4a82-bce2-858b46a9cf25", "address": "fa:16:3e:28:36:9e", "network": {"id": "4c55d05c-607e-4972-898f-4aacefeddfdb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1391357384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bae4f0adee8c4c28add1849316448538", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dced2f3d-7fd3-4a42-836d-9f02dab4c949", "external-id": "nsx-vlan-transportzone-117", "segmentation_id": 117, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc8e6d9b-23", "ovs_interfaceid": "cc8e6d9b-23a8-4a82-bce2-858b46a9cf25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1934.028296] env[62510]: DEBUG oslo_concurrency.lockutils [req-5077a7ba-3d9e-4b35-8c39-ef08c650f01b req-03517b7d-61ab-47d2-955b-d24bbf736098 service nova] Acquired lock "refresh_cache-22002fc1-647e-4e65-a5f0-c3a34575985f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1934.028296] env[62510]: DEBUG nova.network.neutron [req-5077a7ba-3d9e-4b35-8c39-ef08c650f01b req-03517b7d-61ab-47d2-955b-d24bbf736098 service nova] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Refreshing network info cache for port cc8e6d9b-23a8-4a82-bce2-858b46a9cf25 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1934.030096] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:28:36:9e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dced2f3d-7fd3-4a42-836d-9f02dab4c949', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'cc8e6d9b-23a8-4a82-bce2-858b46a9cf25', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1934.040963] env[62510]: DEBUG oslo.service.loopingcall [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1934.045334] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1934.046282] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e4a082c2-b1c0-4983-a6c1-057b7c44c0db {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.077354] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1934.077354] env[62510]: value = "task-1769589" [ 1934.077354] env[62510]: _type = "Task" [ 1934.077354] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1934.086056] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769589, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1934.133055] env[62510]: DEBUG oslo_vmware.api [None req-425f3ce9-db22-4f1f-9ecc-6704fb60a1b0 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769587, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164737} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1934.133426] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-425f3ce9-db22-4f1f-9ecc-6704fb60a1b0 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1934.133745] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-425f3ce9-db22-4f1f-9ecc-6704fb60a1b0 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1934.134083] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-425f3ce9-db22-4f1f-9ecc-6704fb60a1b0 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1934.134400] env[62510]: INFO nova.compute.manager [None req-425f3ce9-db22-4f1f-9ecc-6704fb60a1b0 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Took 0.62 seconds to destroy the instance on the hypervisor. 
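Note (not part of the captured log): the recurring "Acquiring lock … / Lock … acquired … waited N.NNNs / … released … held N.NNNs" records above are emitted by oslo.concurrency's lockutils helpers (the lock() context manager and the synchronized() decorator, per the lockutils.py line references). A minimal sketch of the pattern, assuming only that interface; refresh_network_cache and refresh_fn are hypothetical names used for illustration:

```python
from oslo_concurrency import lockutils


def refresh_network_cache(instance_uuid, refresh_fn):
    """Illustrative only: guard a cache refresh the way the log's
    'refresh_cache-<uuid>' locks do."""
    # Lock name mirrors the "refresh_cache-<instance uuid>" pattern above.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        # One holder per lock name at a time; the library logs the
        # acquire/release and the wait/hold durations seen in the records.
        return refresh_fn(instance_uuid)
```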
[ 1934.134786] env[62510]: DEBUG oslo.service.loopingcall [None req-425f3ce9-db22-4f1f-9ecc-6704fb60a1b0 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1934.135106] env[62510]: DEBUG nova.compute.manager [-] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1934.135263] env[62510]: DEBUG nova.network.neutron [-] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1934.238497] env[62510]: DEBUG oslo_vmware.api [None req-83039c46-fe78-45b2-89b7-ea17ce900b0e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769581, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1934.265159] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "11490e72-b9a5-4e8e-86c4-300c594cd914" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.514s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1934.270874] env[62510]: DEBUG oslo_vmware.api [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769582, 'name': PowerOnVM_Task, 'duration_secs': 1.840473} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1934.272467] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1934.317156] env[62510]: DEBUG oslo_vmware.api [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52bf3074-f180-661e-fc08-166c49ba6513, 'name': SearchDatastore_Task, 'duration_secs': 0.00997} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1934.317724] env[62510]: DEBUG oslo_concurrency.lockutils [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1934.320017] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] fe3b3380-69bb-4563-abf2-9f0db439d31a/fe3b3380-69bb-4563-abf2-9f0db439d31a.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1934.320017] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-03995d0f-a5aa-4fdd-8eaf-14799bd1c66d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.327119] env[62510]: DEBUG oslo_vmware.api [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1934.327119] env[62510]: value = "task-1769590" [ 1934.327119] env[62510]: _type = "Task" [ 1934.327119] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1934.337514] env[62510]: DEBUG oslo_vmware.api [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769590, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1934.371642] env[62510]: DEBUG oslo_vmware.api [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769588, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1934.374042] env[62510]: DEBUG nova.network.neutron [req-5077a7ba-3d9e-4b35-8c39-ef08c650f01b req-03517b7d-61ab-47d2-955b-d24bbf736098 service nova] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Updated VIF entry in instance network info cache for port cc8e6d9b-23a8-4a82-bce2-858b46a9cf25. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1934.376601] env[62510]: DEBUG nova.network.neutron [req-5077a7ba-3d9e-4b35-8c39-ef08c650f01b req-03517b7d-61ab-47d2-955b-d24bbf736098 service nova] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Updating instance_info_cache with network_info: [{"id": "cc8e6d9b-23a8-4a82-bce2-858b46a9cf25", "address": "fa:16:3e:28:36:9e", "network": {"id": "4c55d05c-607e-4972-898f-4aacefeddfdb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1391357384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bae4f0adee8c4c28add1849316448538", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dced2f3d-7fd3-4a42-836d-9f02dab4c949", "external-id": "nsx-vlan-transportzone-117", "segmentation_id": 117, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc8e6d9b-23", "ovs_interfaceid": "cc8e6d9b-23a8-4a82-bce2-858b46a9cf25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1934.405182] env[62510]: DEBUG nova.compute.manager [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1934.405182] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aab90ef-46e1-4a7e-ab19-12b212b71842 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.550958] env[62510]: DEBUG nova.compute.manager [req-bdafa208-fafa-4654-a2f3-b4c2780ec09a req-c86e775d-2f35-44fe-be27-490791340fac service nova] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Received event network-changed-b582dbce-50e8-4781-89ae-5c8667be6584 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1934.551102] env[62510]: DEBUG nova.compute.manager [req-bdafa208-fafa-4654-a2f3-b4c2780ec09a req-c86e775d-2f35-44fe-be27-490791340fac service nova] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Refreshing instance network info cache due to event network-changed-b582dbce-50e8-4781-89ae-5c8667be6584. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1934.551543] env[62510]: DEBUG oslo_concurrency.lockutils [req-bdafa208-fafa-4654-a2f3-b4c2780ec09a req-c86e775d-2f35-44fe-be27-490791340fac service nova] Acquiring lock "refresh_cache-11490e72-b9a5-4e8e-86c4-300c594cd914" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1934.551543] env[62510]: DEBUG oslo_concurrency.lockutils [req-bdafa208-fafa-4654-a2f3-b4c2780ec09a req-c86e775d-2f35-44fe-be27-490791340fac service nova] Acquired lock "refresh_cache-11490e72-b9a5-4e8e-86c4-300c594cd914" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1934.551543] env[62510]: DEBUG nova.network.neutron [req-bdafa208-fafa-4654-a2f3-b4c2780ec09a req-c86e775d-2f35-44fe-be27-490791340fac service nova] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Refreshing network info cache for port b582dbce-50e8-4781-89ae-5c8667be6584 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1934.588042] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769589, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1934.741962] env[62510]: DEBUG oslo_vmware.api [None req-83039c46-fe78-45b2-89b7-ea17ce900b0e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769581, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1934.846827] env[62510]: DEBUG oslo_vmware.api [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769590, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1934.875723] env[62510]: DEBUG oslo_vmware.api [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769588, 'name': PowerOnVM_Task, 'duration_secs': 0.70577} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1934.876205] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1934.877119] env[62510]: INFO nova.compute.manager [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Took 7.98 seconds to spawn the instance on the hypervisor. 
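Note (not part of the captured log): the "Task: {'id': task-…, 'name': PowerOnVM_Task} progress is N%." records followed by "… 'duration_secs': … completed successfully." come from a polling loop over the vSphere task. A self-contained sketch of that polling shape, using a hypothetical get_task_info callable in place of the real TaskInfo lookup:

```python
import time


def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
    """Illustrative polling loop for the 'progress is N%' records above.

    get_task_info is a hypothetical callable returning an object with
    .state ('running'/'success'/'error'), .progress and .error attributes.
    """
    start = time.time()
    while True:
        info = get_task_info(task_ref)
        if info.state == 'success':
            # corresponds to "… 'duration_secs': N} completed successfully."
            return time.time() - start
        if info.state == 'error':
            raise RuntimeError(info.error)
        # corresponds to the periodic "Task: {...} progress is N%." records
        print('Task %s progress is %s%%.' % (task_ref, info.progress))
        time.sleep(poll_interval)
```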
[ 1934.877379] env[62510]: DEBUG nova.compute.manager [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1934.877974] env[62510]: DEBUG oslo_concurrency.lockutils [req-5077a7ba-3d9e-4b35-8c39-ef08c650f01b req-03517b7d-61ab-47d2-955b-d24bbf736098 service nova] Releasing lock "refresh_cache-22002fc1-647e-4e65-a5f0-c3a34575985f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1934.878936] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5bdbbd5-107c-40d9-824b-6dd30797cbb6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.924765] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0d11e387-9909-4043-94e7-ee949a735b25 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lock "cf4160a8-1160-45fc-b9e5-e9526b6c1506" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 36.339s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1934.925603] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "cf4160a8-1160-45fc-b9e5-e9526b6c1506" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 15.041s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1934.925815] env[62510]: INFO nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] During sync_power_state the instance has a pending task (spawning). Skip. 
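Note (not part of the captured log): the lock records carry timing data in a fixed textual form (e.g. "held 36.339s" for the unshelve lock and "waited 15.041s" for the power-state sync above). A small parser, assuming only the exact line format shown in this log, can pull out long waits and holds for triage:

```python
import re

# Matches the 'Lock "<name>" acquired by "<target>" :: waited N.NNNs' and
# 'Lock "<name>" "released" by "<target>" :: held N.NNNs' records above.
LOCK_RE = re.compile(
    r'Lock "(?P<name>[^"]+)" (?:acquired by|"released" by) "(?P<target>[^"]+)"'
    r' :: (?P<kind>waited|held) (?P<secs>[\d.]+)s')


def slow_locks(lines, threshold=1.0):
    """Yield (lock name, target, waited/held, seconds) above the threshold."""
    for line in lines:
        m = LOCK_RE.search(line)
        if m and float(m.group('secs')) > threshold:
            yield (m.group('name'), m.group('target'),
                   m.group('kind'), float(m.group('secs')))
```

Fed the records in this section, it would flag, for example, the 36.339s hold on "cf4160a8-…" during do_unshelve_instance and the 24.297s hold on "11490e72-…" during _locked_do_build_and_run_instance.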
[ 1934.926083] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "cf4160a8-1160-45fc-b9e5-e9526b6c1506" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1934.960127] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ca20311-9bc6-443c-bb02-1643e0279f99 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.969725] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-910483fb-a25f-41f7-807f-216ef4e9757c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.003856] env[62510]: DEBUG nova.network.neutron [-] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1935.006236] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22aa2027-f891-4a77-b06b-089112b57c4a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.014453] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07212700-a456-4dfe-99ad-379b5e7d396f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.031344] env[62510]: DEBUG nova.compute.provider_tree [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1935.089926] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769589, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1935.239418] env[62510]: DEBUG oslo_vmware.api [None req-83039c46-fe78-45b2-89b7-ea17ce900b0e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769581, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1935.306939] env[62510]: DEBUG nova.network.neutron [req-bdafa208-fafa-4654-a2f3-b4c2780ec09a req-c86e775d-2f35-44fe-be27-490791340fac service nova] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Updated VIF entry in instance network info cache for port b582dbce-50e8-4781-89ae-5c8667be6584. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1935.307341] env[62510]: DEBUG nova.network.neutron [req-bdafa208-fafa-4654-a2f3-b4c2780ec09a req-c86e775d-2f35-44fe-be27-490791340fac service nova] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Updating instance_info_cache with network_info: [{"id": "b582dbce-50e8-4781-89ae-5c8667be6584", "address": "fa:16:3e:80:db:8b", "network": {"id": "e420cc26-6a46-4189-b24c-78c39b6b4d50", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-234097015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11c021c6b45c452f83732fe578e576f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6eb7e3e9-5cc2-40f1-a6eb-f70f06531667", "external-id": "nsx-vlan-transportzone-938", "segmentation_id": 938, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb582dbce-50", "ovs_interfaceid": "b582dbce-50e8-4781-89ae-5c8667be6584", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1935.344016] env[62510]: DEBUG oslo_vmware.api [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769590, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.64875} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1935.344276] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] fe3b3380-69bb-4563-abf2-9f0db439d31a/fe3b3380-69bb-4563-abf2-9f0db439d31a.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1935.344488] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1935.344741] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-50accb4a-76e3-44ce-8e43-e194c3c1d457 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.351810] env[62510]: DEBUG oslo_vmware.api [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1935.351810] env[62510]: value = "task-1769591" [ 1935.351810] env[62510]: _type = "Task" [ 1935.351810] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1935.359767] env[62510]: DEBUG oslo_vmware.api [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769591, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1935.403690] env[62510]: INFO nova.compute.manager [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Took 23.32 seconds to build instance. [ 1935.506553] env[62510]: INFO nova.compute.manager [-] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Took 1.37 seconds to deallocate network for instance. 
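Note (not part of the captured log): the set_inventory_for_provider / ProviderTree records in this section report total, reserved and allocation_ratio per resource class. Placement's usable capacity per class is (total - reserved) * allocation_ratio (further constrained by min_unit/max_unit/step_size). A quick check against the logged payload:

```python
# Inventory values copied from the provider-inventory records above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}


def effective_capacity(inv):
    """Usable amount per resource class: (total - reserved) * allocation_ratio."""
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}


print(effective_capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
```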
[ 1935.561379] env[62510]: DEBUG nova.scheduler.client.report [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Updated inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with generation 154 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1935.561669] env[62510]: DEBUG nova.compute.provider_tree [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Updating resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 generation from 154 to 155 during operation: update_inventory {{(pid=62510) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1935.561855] env[62510]: DEBUG nova.compute.provider_tree [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1935.590378] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769589, 'name': CreateVM_Task, 'duration_secs': 1.50881} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1935.590603] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1935.591319] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1935.591558] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1935.592216] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1935.592216] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b70e432-bb8e-4405-93fb-08f8a99aeaee {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.596659] env[62510]: DEBUG oslo_vmware.api [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 1935.596659] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52f33666-8223-6c7d-ea00-d4bdf8b8c3f8" [ 1935.596659] env[62510]: _type = "Task" [ 1935.596659] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1935.604849] env[62510]: DEBUG oslo_vmware.api [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52f33666-8223-6c7d-ea00-d4bdf8b8c3f8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1935.740040] env[62510]: DEBUG oslo_vmware.api [None req-83039c46-fe78-45b2-89b7-ea17ce900b0e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769581, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1935.809922] env[62510]: DEBUG oslo_concurrency.lockutils [req-bdafa208-fafa-4654-a2f3-b4c2780ec09a req-c86e775d-2f35-44fe-be27-490791340fac service nova] Releasing lock "refresh_cache-11490e72-b9a5-4e8e-86c4-300c594cd914" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1935.862133] env[62510]: DEBUG oslo_vmware.api [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769591, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.160683} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1935.862409] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1935.863182] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78ac1730-025f-4011-9a4a-c1772e5c42a3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.885064] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] fe3b3380-69bb-4563-abf2-9f0db439d31a/fe3b3380-69bb-4563-abf2-9f0db439d31a.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1935.885323] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-127e8c8b-a851-4a48-9a41-bb68c47b83ee {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.904440] env[62510]: DEBUG oslo_vmware.api [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1935.904440] env[62510]: value = "task-1769592" [ 1935.904440] env[62510]: _type = "Task" [ 1935.904440] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1935.909389] env[62510]: DEBUG oslo_concurrency.lockutils [None req-be614538-2f80-468d-a702-c196c7a61dcd tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Lock "a5a9c086-6ae2-4644-acfa-7c147593b8d2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.838s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1935.914529] env[62510]: DEBUG oslo_vmware.api [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769592, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1935.933386] env[62510]: DEBUG nova.compute.manager [req-f92acb65-7709-4138-ac2b-377b0c868207 req-8392aa86-89e3-4c85-b4f0-fd393c31fc6c service nova] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Received event network-vif-deleted-5717b8b0-bfb7-4f74-9ac3-833161c514f8 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1936.013890] env[62510]: DEBUG oslo_concurrency.lockutils [None req-425f3ce9-db22-4f1f-9ecc-6704fb60a1b0 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1936.066372] env[62510]: DEBUG oslo_concurrency.lockutils [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.419s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1936.066901] env[62510]: DEBUG nova.compute.manager [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1936.070062] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3669172f-2e90-490e-af2f-b598baaa02e5 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.509s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1936.070264] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3669172f-2e90-490e-af2f-b598baaa02e5 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1936.072338] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d3c9912f-9a21-4398-9c64-97b5b80ef9a0 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.293s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1936.072523] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d3c9912f-9a21-4398-9c64-97b5b80ef9a0 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1936.074943] env[62510]: DEBUG oslo_concurrency.lockutils [None req-425f3ce9-db22-4f1f-9ecc-6704fb60a1b0 tempest-DeleteServersTestJSON-1994223681 
tempest-DeleteServersTestJSON-1994223681-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.061s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1936.075172] env[62510]: DEBUG nova.objects.instance [None req-425f3ce9-db22-4f1f-9ecc-6704fb60a1b0 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lazy-loading 'resources' on Instance uuid 14f08e1c-bf2a-4dca-9770-8ceb311130e3 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1936.095393] env[62510]: INFO nova.scheduler.client.report [None req-d3c9912f-9a21-4398-9c64-97b5b80ef9a0 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Deleted allocations for instance cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74 [ 1936.096887] env[62510]: INFO nova.scheduler.client.report [None req-3669172f-2e90-490e-af2f-b598baaa02e5 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Deleted allocations for instance e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c [ 1936.113361] env[62510]: DEBUG oslo_vmware.api [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52f33666-8223-6c7d-ea00-d4bdf8b8c3f8, 'name': SearchDatastore_Task, 'duration_secs': 0.041897} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1936.113658] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1936.113952] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1936.114138] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1936.114290] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1936.114468] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 
tempest-ServerActionsTestOtherB-1185268283-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1936.114723] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8fa42a96-7e3c-4cb7-a05f-319fe30e1365 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.123547] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1936.123950] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1936.124619] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-917880a9-79b3-4bdb-ae0b-f00450fce014 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.130248] env[62510]: DEBUG oslo_vmware.api [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 1936.130248] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52a9cc90-91f3-0b80-146f-1d8b80b87f80" [ 1936.130248] env[62510]: _type = "Task" [ 1936.130248] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1936.139604] env[62510]: DEBUG oslo_vmware.api [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52a9cc90-91f3-0b80-146f-1d8b80b87f80, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1936.240707] env[62510]: DEBUG oslo_vmware.api [None req-83039c46-fe78-45b2-89b7-ea17ce900b0e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769581, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1936.414813] env[62510]: DEBUG oslo_vmware.api [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769592, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1936.573665] env[62510]: DEBUG nova.compute.utils [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1936.573881] env[62510]: DEBUG nova.compute.manager [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Not allocating networking since 'none' was specified. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1936.611215] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d3c9912f-9a21-4398-9c64-97b5b80ef9a0 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Lock "cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.469s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1936.612250] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3669172f-2e90-490e-af2f-b598baaa02e5 tempest-MultipleCreateTestJSON-462701472 tempest-MultipleCreateTestJSON-462701472-project-member] Lock "e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.563s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1936.642929] env[62510]: DEBUG oslo_vmware.api [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52a9cc90-91f3-0b80-146f-1d8b80b87f80, 'name': SearchDatastore_Task, 'duration_secs': 0.009648} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1936.644235] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63dbb46c-d09d-4aec-b914-b5bba8a1b458 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.648268] env[62510]: INFO nova.compute.manager [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Rescuing [ 1936.648606] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Acquiring lock "refresh_cache-a5a9c086-6ae2-4644-acfa-7c147593b8d2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1936.648690] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Acquired lock "refresh_cache-a5a9c086-6ae2-4644-acfa-7c147593b8d2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1936.648799] env[62510]: DEBUG nova.network.neutron [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1936.652646] env[62510]: DEBUG oslo_vmware.api [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 1936.652646] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52619e57-06b6-ed2c-bd1c-73d38065c2c9" [ 1936.652646] env[62510]: _type = "Task" [ 1936.652646] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1936.664949] env[62510]: DEBUG oslo_vmware.api [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52619e57-06b6-ed2c-bd1c-73d38065c2c9, 'name': SearchDatastore_Task, 'duration_secs': 0.012314} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1936.665203] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1936.665356] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 22002fc1-647e-4e65-a5f0-c3a34575985f/22002fc1-647e-4e65-a5f0-c3a34575985f.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1936.665841] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c06258b0-e102-4433-807a-30d0d4d0213b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.674841] env[62510]: DEBUG oslo_vmware.api [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 1936.674841] env[62510]: value = "task-1769593" [ 1936.674841] env[62510]: _type = "Task" [ 1936.674841] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1936.684335] env[62510]: DEBUG oslo_vmware.api [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769593, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1936.746686] env[62510]: DEBUG oslo_vmware.api [None req-83039c46-fe78-45b2-89b7-ea17ce900b0e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769581, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1936.810036] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd31be7e-2bc1-48cf-b5f1-034af9075439 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.816978] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed792a97-5899-4bb4-91c0-d2c6dd2df2e1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.849059] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2766427-7ffa-40dd-a334-cb7b3e51c222 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.857816] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-499e251e-a0e5-4daf-85d1-c45880af9a99 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.871262] env[62510]: DEBUG nova.compute.provider_tree [None req-425f3ce9-db22-4f1f-9ecc-6704fb60a1b0 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1936.915483] env[62510]: DEBUG oslo_vmware.api [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769592, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1937.075356] env[62510]: DEBUG nova.compute.manager [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1937.185428] env[62510]: DEBUG oslo_vmware.api [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769593, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1937.243358] env[62510]: DEBUG oslo_vmware.api [None req-83039c46-fe78-45b2-89b7-ea17ce900b0e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769581, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1937.374022] env[62510]: DEBUG nova.scheduler.client.report [None req-425f3ce9-db22-4f1f-9ecc-6704fb60a1b0 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1937.416161] env[62510]: DEBUG oslo_vmware.api [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769592, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1937.485016] env[62510]: DEBUG nova.network.neutron [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Updating instance_info_cache with network_info: [{"id": "54c12d32-3e2e-4ec3-a6a0-de7c5219efcc", "address": "fa:16:3e:9f:80:f5", "network": {"id": "dfd8964c-0225-4df4-815d-ef7af9be1790", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2123884413-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "fa6ed026a1264d02abe75467127bae99", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fd77ecbc-aaaf-45f4-ae8f-977d90e4052f", "external-id": "nsx-vlan-transportzone-171", "segmentation_id": 171, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54c12d32-3e", "ovs_interfaceid": "54c12d32-3e2e-4ec3-a6a0-de7c5219efcc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1937.685643] env[62510]: DEBUG oslo_vmware.api [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769593, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1937.743668] env[62510]: DEBUG oslo_vmware.api [None req-83039c46-fe78-45b2-89b7-ea17ce900b0e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769581, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1937.879376] env[62510]: DEBUG oslo_concurrency.lockutils [None req-425f3ce9-db22-4f1f-9ecc-6704fb60a1b0 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.804s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1937.907839] env[62510]: INFO nova.scheduler.client.report [None req-425f3ce9-db22-4f1f-9ecc-6704fb60a1b0 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Deleted allocations for instance 14f08e1c-bf2a-4dca-9770-8ceb311130e3 [ 1937.922263] env[62510]: DEBUG oslo_vmware.api [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769592, 'name': ReconfigVM_Task, 'duration_secs': 1.75281} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1937.922564] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Reconfigured VM instance instance-00000071 to attach disk [datastore1] fe3b3380-69bb-4563-abf2-9f0db439d31a/fe3b3380-69bb-4563-abf2-9f0db439d31a.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1937.923227] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-745f02ea-b379-4d6b-b21d-299825d714bd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.930077] env[62510]: DEBUG oslo_vmware.api [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1937.930077] env[62510]: value = "task-1769594" [ 1937.930077] env[62510]: _type = "Task" [ 1937.930077] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1937.939766] env[62510]: DEBUG oslo_vmware.api [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769594, 'name': Rename_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1937.988087] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Releasing lock "refresh_cache-a5a9c086-6ae2-4644-acfa-7c147593b8d2" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1938.089038] env[62510]: DEBUG nova.compute.manager [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1938.119412] env[62510]: DEBUG nova.virt.hardware [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1938.119810] env[62510]: DEBUG nova.virt.hardware [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1938.121023] env[62510]: DEBUG nova.virt.hardware [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1938.121327] env[62510]: DEBUG nova.virt.hardware [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1938.121916] env[62510]: DEBUG nova.virt.hardware [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1938.122435] env[62510]: DEBUG nova.virt.hardware [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1938.122843] env[62510]: DEBUG nova.virt.hardware [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1938.123097] env[62510]: DEBUG nova.virt.hardware [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1938.123384] env[62510]: DEBUG nova.virt.hardware [None req-55a0c881-f886-4438-b30c-7706178d8a8c 
tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1938.123905] env[62510]: DEBUG nova.virt.hardware [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1938.124334] env[62510]: DEBUG nova.virt.hardware [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1938.125254] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1694f34a-bfa9-4386-8a96-25a3ff9a12b7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.138062] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-510bdc69-a4b5-416f-bdc5-797622dde458 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.157896] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Instance VIF info [] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1938.165801] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Creating folder: Project (e60b4a3cdc274a08bd31dd9eb7484986). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1938.166166] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1e33a0dc-5202-4476-b624-632578cf070d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.177324] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Created folder: Project (e60b4a3cdc274a08bd31dd9eb7484986) in parent group-v367197. [ 1938.177503] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Creating folder: Instances. Parent ref: group-v367493. 
{{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1938.180482] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2be05414-2da1-4f99-8f48-f6e66567b90c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.186684] env[62510]: DEBUG oslo_vmware.api [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769593, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1938.189296] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Created folder: Instances in parent group-v367493. [ 1938.189496] env[62510]: DEBUG oslo.service.loopingcall [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1938.189684] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1938.189876] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d8584d49-3f8d-4580-9989-ffd718331c6e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.208166] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1938.208166] env[62510]: value = "task-1769597" [ 1938.208166] env[62510]: _type = "Task" [ 1938.208166] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1938.215695] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769597, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1938.244189] env[62510]: DEBUG oslo_vmware.api [None req-83039c46-fe78-45b2-89b7-ea17ce900b0e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769581, 'name': ReconfigVM_Task, 'duration_secs': 5.87059} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1938.244466] env[62510]: DEBUG oslo_concurrency.lockutils [None req-83039c46-fe78-45b2-89b7-ea17ce900b0e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Releasing lock "72f8492b-304a-4451-ab40-4cdfe36b9e19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1938.244675] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-83039c46-fe78-45b2-89b7-ea17ce900b0e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Reconfigured VM to detach interface {{(pid=62510) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1938.420928] env[62510]: DEBUG oslo_concurrency.lockutils [None req-425f3ce9-db22-4f1f-9ecc-6704fb60a1b0 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "14f08e1c-bf2a-4dca-9770-8ceb311130e3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.410s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1938.454076] env[62510]: DEBUG oslo_vmware.api [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769594, 'name': Rename_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1938.687865] env[62510]: DEBUG oslo_vmware.api [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769593, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1938.718225] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769597, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1938.952552] env[62510]: DEBUG oslo_vmware.api [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769594, 'name': Rename_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.060462] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "14a54dac-d2b8-4618-86c8-ab2d08bae005" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1939.060462] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "14a54dac-d2b8-4618-86c8-ab2d08bae005" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1939.188609] env[62510]: DEBUG oslo_vmware.api [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769593, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.220020] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769597, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.449186] env[62510]: DEBUG oslo_vmware.api [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769594, 'name': Rename_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.472390] env[62510]: INFO nova.compute.manager [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Rebuilding instance [ 1939.518076] env[62510]: DEBUG nova.compute.manager [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1939.519063] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02066660-16aa-4f92-820e-310b73bbab81 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.524480] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1939.526352] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d067e2e5-f3c4-4d70-878c-b8cb8e838ade {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.541778] env[62510]: DEBUG oslo_vmware.api [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for the task: (returnval){ [ 1939.541778] env[62510]: value = "task-1769598" [ 1939.541778] env[62510]: _type = "Task" [ 1939.541778] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1939.554717] env[62510]: DEBUG oslo_vmware.api [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769598, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.562609] env[62510]: DEBUG nova.compute.manager [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1939.684073] env[62510]: DEBUG oslo_concurrency.lockutils [None req-83039c46-fe78-45b2-89b7-ea17ce900b0e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "refresh_cache-72f8492b-304a-4451-ab40-4cdfe36b9e19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1939.685926] env[62510]: DEBUG oslo_concurrency.lockutils [None req-83039c46-fe78-45b2-89b7-ea17ce900b0e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquired lock "refresh_cache-72f8492b-304a-4451-ab40-4cdfe36b9e19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1939.685926] env[62510]: DEBUG nova.network.neutron [None req-83039c46-fe78-45b2-89b7-ea17ce900b0e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1939.691807] env[62510]: DEBUG oslo_vmware.api [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769593, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.718650] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769597, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.949830] env[62510]: DEBUG oslo_vmware.api [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769594, 'name': Rename_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.047023] env[62510]: DEBUG oslo_concurrency.lockutils [None req-628f6381-348f-4b6f-abc6-04063a2ec75f tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "72f8492b-304a-4451-ab40-4cdfe36b9e19" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1940.047406] env[62510]: DEBUG oslo_concurrency.lockutils [None req-628f6381-348f-4b6f-abc6-04063a2ec75f tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "72f8492b-304a-4451-ab40-4cdfe36b9e19" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1940.047490] env[62510]: DEBUG oslo_concurrency.lockutils [None req-628f6381-348f-4b6f-abc6-04063a2ec75f tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "72f8492b-304a-4451-ab40-4cdfe36b9e19-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1940.047667] env[62510]: DEBUG oslo_concurrency.lockutils [None req-628f6381-348f-4b6f-abc6-04063a2ec75f tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "72f8492b-304a-4451-ab40-4cdfe36b9e19-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1940.047845] env[62510]: DEBUG oslo_concurrency.lockutils [None req-628f6381-348f-4b6f-abc6-04063a2ec75f tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "72f8492b-304a-4451-ab40-4cdfe36b9e19-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1940.052576] env[62510]: DEBUG oslo_vmware.api [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769598, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.053017] env[62510]: INFO nova.compute.manager [None req-628f6381-348f-4b6f-abc6-04063a2ec75f tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Terminating instance [ 1940.088258] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1940.088569] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1940.090073] env[62510]: INFO nova.compute.claims [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1940.190974] env[62510]: DEBUG oslo_vmware.api [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769593, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.221257] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769597, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.451842] env[62510]: DEBUG oslo_vmware.api [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769594, 'name': Rename_Task, 'duration_secs': 2.3359} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1940.452161] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1940.452411] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e2013675-6d11-48fa-9d5d-a111e8cf505d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.460911] env[62510]: DEBUG oslo_vmware.api [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1940.460911] env[62510]: value = "task-1769599" [ 1940.460911] env[62510]: _type = "Task" [ 1940.460911] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1940.473225] env[62510]: DEBUG oslo_vmware.api [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769599, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.535230] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1940.535566] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2ed7fb18-16d7-4c30-b3c7-84fc73b6e0cb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.548484] env[62510]: DEBUG oslo_vmware.api [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 1940.548484] env[62510]: value = "task-1769600" [ 1940.548484] env[62510]: _type = "Task" [ 1940.548484] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1940.557171] env[62510]: DEBUG nova.compute.manager [None req-628f6381-348f-4b6f-abc6-04063a2ec75f tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1940.557555] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-628f6381-348f-4b6f-abc6-04063a2ec75f tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1940.557967] env[62510]: DEBUG oslo_vmware.api [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769598, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.559618] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7498745-2966-46a9-84f2-0a89f870894a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.571169] env[62510]: DEBUG oslo_vmware.api [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769600, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.579598] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-628f6381-348f-4b6f-abc6-04063a2ec75f tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1940.579872] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8ed210a8-4bf6-4087-9081-63b85c1800f7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.586841] env[62510]: DEBUG oslo_vmware.api [None req-628f6381-348f-4b6f-abc6-04063a2ec75f tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1940.586841] env[62510]: value = "task-1769601" [ 1940.586841] env[62510]: _type = "Task" [ 1940.586841] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1940.598160] env[62510]: DEBUG oslo_vmware.api [None req-628f6381-348f-4b6f-abc6-04063a2ec75f tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769601, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.647809] env[62510]: INFO nova.network.neutron [None req-83039c46-fe78-45b2-89b7-ea17ce900b0e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Port 98a986f8-1515-4f07-aee2-94ce84796db0 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1940.648285] env[62510]: DEBUG nova.network.neutron [None req-83039c46-fe78-45b2-89b7-ea17ce900b0e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Updating instance_info_cache with network_info: [{"id": "348ebdec-3667-4eea-b76e-5356163db2f9", "address": "fa:16:3e:eb:0c:99", "network": {"id": "940be04f-b555-4383-aaf8-63734d94a773", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1337985217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.135", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cca414b18f8d431786c155d359f1325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2e0cfc48-d93b-4477-8082-69a2f7aa7701", "external-id": "nsx-vlan-transportzone-275", "segmentation_id": 275, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap348ebdec-36", "ovs_interfaceid": "348ebdec-3667-4eea-b76e-5356163db2f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1940.690485] env[62510]: DEBUG oslo_vmware.api [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769593, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.897287} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1940.690848] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 22002fc1-647e-4e65-a5f0-c3a34575985f/22002fc1-647e-4e65-a5f0-c3a34575985f.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1940.691102] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1940.691338] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-21b7c8ea-c7d8-4c3b-90d4-2b86dda0c23c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.699927] env[62510]: DEBUG oslo_vmware.api [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 1940.699927] env[62510]: value = "task-1769602" [ 1940.699927] env[62510]: _type = "Task" [ 1940.699927] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1940.709247] env[62510]: DEBUG oslo_vmware.api [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769602, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.720640] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769597, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.971761] env[62510]: DEBUG oslo_vmware.api [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769599, 'name': PowerOnVM_Task} progress is 90%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.056559] env[62510]: DEBUG oslo_vmware.api [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769598, 'name': PowerOffVM_Task, 'duration_secs': 1.169843} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1941.057590] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1941.058163] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ffbb30a-d47f-44d5-9fe8-239c6224379e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.063515] env[62510]: DEBUG oslo_vmware.api [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769600, 'name': PowerOffVM_Task, 'duration_secs': 0.31422} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1941.064166] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1941.064461] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1941.065248] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c609162-2bea-4c8c-b012-ff3b855a1ba7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.082337] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14d3cf80-66b5-4a4f-8ad7-39146401d6d6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.087034] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1941.087729] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-25a7ffc2-844a-4a10-8941-31b141752220 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.103945] env[62510]: DEBUG oslo_vmware.api [None req-628f6381-348f-4b6f-abc6-04063a2ec75f tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769601, 'name': PowerOffVM_Task, 'duration_secs': 0.27851} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1941.105027] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-628f6381-348f-4b6f-abc6-04063a2ec75f tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1941.105027] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-628f6381-348f-4b6f-abc6-04063a2ec75f tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1941.105027] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c0d742aa-e26c-48b0-a851-646f9bf193e3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.123538] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1941.123819] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-988bf757-9a88-4858-9366-3e782dd11d9d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.129781] env[62510]: DEBUG oslo_vmware.api [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for the task: (returnval){ [ 1941.129781] env[62510]: value = "task-1769605" [ 1941.129781] env[62510]: _type = "Task" [ 1941.129781] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1941.141934] env[62510]: DEBUG oslo_vmware.api [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769605, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.151302] env[62510]: DEBUG oslo_concurrency.lockutils [None req-83039c46-fe78-45b2-89b7-ea17ce900b0e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Releasing lock "refresh_cache-72f8492b-304a-4451-ab40-4cdfe36b9e19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1941.213065] env[62510]: DEBUG oslo_vmware.api [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769602, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084853} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1941.215970] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1941.216663] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f80cba97-1c49-437f-924a-57d4ccbfa020 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.226728] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769597, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.244560] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] 22002fc1-647e-4e65-a5f0-c3a34575985f/22002fc1-647e-4e65-a5f0-c3a34575985f.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1941.246945] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7eda8a46-03f8-4ace-a2aa-5477e5edcbb9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.268356] env[62510]: DEBUG oslo_vmware.api [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 1941.268356] env[62510]: value = "task-1769606" [ 1941.268356] env[62510]: _type = "Task" [ 1941.268356] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1941.280812] env[62510]: DEBUG oslo_vmware.api [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769606, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.355098] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c64a371-7c4f-4aec-9d95-0618511161e3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.363208] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dee305b-f052-4ce1-9f2b-31b4bed7dfaf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.394133] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11dbafb9-58e0-47f0-932b-be0c160756ba {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.402352] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-012b594f-9525-4d99-bd5e-5ae12f82e834 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.417532] env[62510]: DEBUG nova.compute.provider_tree [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1941.471838] env[62510]: DEBUG oslo_vmware.api [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769599, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.642939] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] VM already powered off {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1941.643230] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1941.643545] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1941.643735] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1941.643956] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1941.644272] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-51d750ef-1e32-4805-8bfa-ff3d4188444f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.654984] env[62510]: DEBUG oslo_concurrency.lockutils [None req-83039c46-fe78-45b2-89b7-ea17ce900b0e tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "interface-72f8492b-304a-4451-ab40-4cdfe36b9e19-98a986f8-1515-4f07-aee2-94ce84796db0" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.046s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1941.661200] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1941.661449] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1941.662463] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90e5e38f-b84f-494b-9fb7-ed342fe3285c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.670193] env[62510]: DEBUG oslo_vmware.api [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for the task: (returnval){ [ 1941.670193] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52f90db9-b865-5e4c-1979-9c36f02343f2" [ 1941.670193] env[62510]: _type = "Task" [ 1941.670193] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1941.682587] env[62510]: DEBUG oslo_vmware.api [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52f90db9-b865-5e4c-1979-9c36f02343f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.724079] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769597, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.778671] env[62510]: DEBUG oslo_vmware.api [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769606, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.937500] env[62510]: ERROR nova.scheduler.client.report [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [req-4697727e-bef3-4a88-9130-665a4a3a33f2] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c3653102-341b-4ed1-8b1f-1abaf8aa3e56. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-4697727e-bef3-4a88-9130-665a4a3a33f2"}]} [ 1941.955670] env[62510]: DEBUG nova.scheduler.client.report [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Refreshing inventories for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1941.972789] env[62510]: DEBUG oslo_vmware.api [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769599, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.973845] env[62510]: DEBUG nova.scheduler.client.report [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Updating ProviderTree inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1941.974064] env[62510]: DEBUG nova.compute.provider_tree [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1941.986977] env[62510]: DEBUG nova.scheduler.client.report [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Refreshing aggregate associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, aggregates: None {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1942.017724] env[62510]: DEBUG nova.scheduler.client.report [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Refreshing trait associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1942.184017] env[62510]: DEBUG oslo_vmware.api [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52f90db9-b865-5e4c-1979-9c36f02343f2, 'name': SearchDatastore_Task, 'duration_secs': 0.109259} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1942.184803] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45beb8cf-c479-4949-b84e-c970bd42b513 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.196355] env[62510]: DEBUG oslo_vmware.api [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for the task: (returnval){ [ 1942.196355] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]521fbc36-2d9a-6183-6c66-15a191704ede" [ 1942.196355] env[62510]: _type = "Task" [ 1942.196355] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1942.205592] env[62510]: DEBUG oslo_vmware.api [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]521fbc36-2d9a-6183-6c66-15a191704ede, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1942.221334] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769597, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1942.242546] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-105c0f62-9cb0-4812-b37d-e9689bbbd625 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.249611] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8eb454b-0c6d-4d64-a493-2a7688fb2d74 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.283265] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d5f9b0d-d69c-4163-b5fb-809f4d50368a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.293711] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a08b5acd-12f8-4d7a-bc37-4f6512eace3f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.297329] env[62510]: DEBUG oslo_vmware.api [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769606, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1942.308033] env[62510]: DEBUG nova.compute.provider_tree [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1942.472963] env[62510]: DEBUG oslo_vmware.api [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769599, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1942.707488] env[62510]: DEBUG oslo_vmware.api [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]521fbc36-2d9a-6183-6c66-15a191704ede, 'name': SearchDatastore_Task, 'duration_secs': 0.013787} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1942.708173] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1942.708173] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] a5a9c086-6ae2-4644-acfa-7c147593b8d2/645af513-c243-4722-b631-714f21477ae6-rescue.vmdk. {{(pid=62510) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1942.708319] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e61cb305-06ab-4aff-9262-f78fae56d70c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.717494] env[62510]: DEBUG oslo_vmware.api [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for the task: (returnval){ [ 1942.717494] env[62510]: value = "task-1769607" [ 1942.717494] env[62510]: _type = "Task" [ 1942.717494] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1942.723597] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769597, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1942.727988] env[62510]: DEBUG oslo_vmware.api [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769607, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1942.789417] env[62510]: DEBUG oslo_vmware.api [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769606, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1942.837617] env[62510]: DEBUG nova.scheduler.client.report [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Updated inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with generation 156 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1942.837865] env[62510]: DEBUG nova.compute.provider_tree [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Updating resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 generation from 156 to 157 during operation: update_inventory {{(pid=62510) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1942.838126] env[62510]: DEBUG nova.compute.provider_tree [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1942.976292] env[62510]: DEBUG oslo_vmware.api [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769599, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.230349] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769597, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.233244] env[62510]: DEBUG oslo_vmware.api [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769607, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.290895] env[62510]: DEBUG oslo_vmware.api [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769606, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.342843] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.254s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1943.343413] env[62510]: DEBUG nova.compute.manager [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1943.474328] env[62510]: DEBUG oslo_vmware.api [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769599, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.727941] env[62510]: DEBUG oslo_vmware.api [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769607, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.714223} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1943.731079] env[62510]: INFO nova.virt.vmwareapi.ds_util [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] a5a9c086-6ae2-4644-acfa-7c147593b8d2/645af513-c243-4722-b631-714f21477ae6-rescue.vmdk. [ 1943.731325] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769597, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.732017] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d44b018e-2163-4175-b7dd-38c0c6df3a18 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.756983] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] a5a9c086-6ae2-4644-acfa-7c147593b8d2/645af513-c243-4722-b631-714f21477ae6-rescue.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1943.757303] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0cc0506b-b1be-4208-a51c-bd6c9df3fa51 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.778285] env[62510]: DEBUG oslo_vmware.api [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for the task: (returnval){ [ 1943.778285] env[62510]: value = "task-1769608" [ 1943.778285] env[62510]: _type = "Task" [ 1943.778285] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1943.789483] env[62510]: DEBUG oslo_vmware.api [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769608, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.792545] env[62510]: DEBUG oslo_vmware.api [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769606, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.848294] env[62510]: DEBUG nova.compute.utils [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1943.849873] env[62510]: DEBUG nova.compute.manager [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1943.850062] env[62510]: DEBUG nova.network.neutron [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1943.888232] env[62510]: DEBUG nova.policy [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dbc6eced57ea45fdafc3635a58fb3611', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f878b652f01c48139bfc6996e5e32f5b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1943.975428] env[62510]: DEBUG oslo_vmware.api [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769599, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.124408] env[62510]: DEBUG nova.network.neutron [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Successfully created port: 9015bc32-b9ad-4846-a019-0a10e61e5218 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1944.226536] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769597, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.291382] env[62510]: DEBUG oslo_vmware.api [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769608, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.294460] env[62510]: DEBUG oslo_vmware.api [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769606, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.343032] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-628f6381-348f-4b6f-abc6-04063a2ec75f tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1944.343032] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-628f6381-348f-4b6f-abc6-04063a2ec75f tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1944.343032] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-628f6381-348f-4b6f-abc6-04063a2ec75f tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Deleting the datastore file [datastore1] 72f8492b-304a-4451-ab40-4cdfe36b9e19 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1944.343032] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d88ecf86-84fa-4024-8d6b-b283e401d436 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.345173] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1944.345407] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1944.345590] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Deleting the datastore file [datastore1] 9956e5d2-edda-47af-a3df-743ebed1154b {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1944.348110] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-67b9f49b-1e5a-4c0c-9759-a5bf4bdc4702 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.351469] env[62510]: DEBUG oslo_vmware.api [None req-628f6381-348f-4b6f-abc6-04063a2ec75f tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1944.351469] env[62510]: value = "task-1769609" [ 1944.351469] env[62510]: _type = "Task" [ 1944.351469] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1944.352959] env[62510]: DEBUG nova.compute.manager [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1944.355751] env[62510]: DEBUG oslo_vmware.api [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 1944.355751] env[62510]: value = "task-1769610" [ 1944.355751] env[62510]: _type = "Task" [ 1944.355751] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1944.369282] env[62510]: DEBUG oslo_vmware.api [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769610, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.372545] env[62510]: DEBUG oslo_vmware.api [None req-628f6381-348f-4b6f-abc6-04063a2ec75f tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769609, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.475288] env[62510]: DEBUG oslo_vmware.api [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769599, 'name': PowerOnVM_Task, 'duration_secs': 3.890698} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1944.475549] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1944.475749] env[62510]: INFO nova.compute.manager [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Took 15.18 seconds to spawn the instance on the hypervisor. [ 1944.475926] env[62510]: DEBUG nova.compute.manager [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1944.476764] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfb1a0d5-cb4a-4c86-8e91-fc4b13111b8c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.727122] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769597, 'name': CreateVM_Task, 'duration_secs': 6.317774} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1944.727316] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1944.727864] env[62510]: DEBUG oslo_concurrency.lockutils [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1944.728099] env[62510]: DEBUG oslo_concurrency.lockutils [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1944.728451] env[62510]: DEBUG oslo_concurrency.lockutils [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1944.728728] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a76d5e2-0f35-46db-9a6d-dd951cdab5cc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.734591] env[62510]: DEBUG oslo_vmware.api [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Waiting for the task: (returnval){ [ 1944.734591] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52cc8d3b-821b-96ba-7d28-da8ce312023a" [ 1944.734591] env[62510]: _type = "Task" [ 1944.734591] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1944.743060] env[62510]: DEBUG oslo_vmware.api [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52cc8d3b-821b-96ba-7d28-da8ce312023a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.792175] env[62510]: DEBUG oslo_vmware.api [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769608, 'name': ReconfigVM_Task, 'duration_secs': 0.968806} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1944.798019] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Reconfigured VM instance instance-00000070 to attach disk [datastore1] a5a9c086-6ae2-4644-acfa-7c147593b8d2/645af513-c243-4722-b631-714f21477ae6-rescue.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1944.798019] env[62510]: DEBUG oslo_vmware.api [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769606, 'name': ReconfigVM_Task, 'duration_secs': 3.131109} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1944.798019] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dd2baf8-5c55-4802-97bc-0bccdc29f66c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.799268] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Reconfigured VM instance instance-00000072 to attach disk [datastore1] 22002fc1-647e-4e65-a5f0-c3a34575985f/22002fc1-647e-4e65-a5f0-c3a34575985f.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1944.799867] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fb50a15a-dd38-4522-a203-280b88c94d60 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.826550] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-432829b9-3e2f-4c92-a2d9-b8c3c18079dd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.837559] env[62510]: DEBUG oslo_vmware.api [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 1944.837559] env[62510]: value = "task-1769611" [ 1944.837559] env[62510]: _type = "Task" [ 1944.837559] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1944.847219] env[62510]: DEBUG oslo_vmware.api [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769611, 'name': Rename_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.848640] env[62510]: DEBUG oslo_vmware.api [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for the task: (returnval){ [ 1944.848640] env[62510]: value = "task-1769612" [ 1944.848640] env[62510]: _type = "Task" [ 1944.848640] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1944.862037] env[62510]: DEBUG oslo_vmware.api [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769612, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.868740] env[62510]: DEBUG oslo_vmware.api [None req-628f6381-348f-4b6f-abc6-04063a2ec75f tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769609, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.873521] env[62510]: DEBUG oslo_vmware.api [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769610, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.996266] env[62510]: INFO nova.compute.manager [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Took 31.71 seconds to build instance. [ 1945.208427] env[62510]: DEBUG nova.compute.manager [req-c6eaba48-37a5-42db-8306-cafed677ff9c req-e14533f1-ac5d-4f65-8a72-f31d6d13aadf service nova] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Received event network-changed-405ea0bb-7824-446f-8b19-9d455a30b449 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1945.208618] env[62510]: DEBUG nova.compute.manager [req-c6eaba48-37a5-42db-8306-cafed677ff9c req-e14533f1-ac5d-4f65-8a72-f31d6d13aadf service nova] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Refreshing instance network info cache due to event network-changed-405ea0bb-7824-446f-8b19-9d455a30b449. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1945.208828] env[62510]: DEBUG oslo_concurrency.lockutils [req-c6eaba48-37a5-42db-8306-cafed677ff9c req-e14533f1-ac5d-4f65-8a72-f31d6d13aadf service nova] Acquiring lock "refresh_cache-fe3b3380-69bb-4563-abf2-9f0db439d31a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1945.208969] env[62510]: DEBUG oslo_concurrency.lockutils [req-c6eaba48-37a5-42db-8306-cafed677ff9c req-e14533f1-ac5d-4f65-8a72-f31d6d13aadf service nova] Acquired lock "refresh_cache-fe3b3380-69bb-4563-abf2-9f0db439d31a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1945.209156] env[62510]: DEBUG nova.network.neutron [req-c6eaba48-37a5-42db-8306-cafed677ff9c req-e14533f1-ac5d-4f65-8a72-f31d6d13aadf service nova] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Refreshing network info cache for port 405ea0bb-7824-446f-8b19-9d455a30b449 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1945.950011] env[62510]: DEBUG nova.compute.manager [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1945.952124] env[62510]: DEBUG oslo_concurrency.lockutils [None req-43884005-6916-44fc-ba48-3d3d18a0d404 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "fe3b3380-69bb-4563-abf2-9f0db439d31a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.676s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1945.954039] env[62510]: DEBUG oslo_vmware.api [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52cc8d3b-821b-96ba-7d28-da8ce312023a, 'name': SearchDatastore_Task, 'duration_secs': 0.039591} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1945.954795] env[62510]: DEBUG nova.network.neutron [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Successfully updated port: 9015bc32-b9ad-4846-a019-0a10e61e5218 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1945.959757] env[62510]: DEBUG nova.compute.manager [req-9c3ba436-a91e-4051-906b-52405bd2e650 req-c54803ff-f0f7-44b9-b084-a455dd646369 service nova] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Received event network-vif-plugged-9015bc32-b9ad-4846-a019-0a10e61e5218 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1945.959757] env[62510]: DEBUG oslo_concurrency.lockutils [req-9c3ba436-a91e-4051-906b-52405bd2e650 req-c54803ff-f0f7-44b9-b084-a455dd646369 service nova] Acquiring lock "14a54dac-d2b8-4618-86c8-ab2d08bae005-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1945.959757] env[62510]: DEBUG oslo_concurrency.lockutils [req-9c3ba436-a91e-4051-906b-52405bd2e650 req-c54803ff-f0f7-44b9-b084-a455dd646369 service nova] Lock "14a54dac-d2b8-4618-86c8-ab2d08bae005-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1945.959757] env[62510]: DEBUG oslo_concurrency.lockutils [req-9c3ba436-a91e-4051-906b-52405bd2e650 req-c54803ff-f0f7-44b9-b084-a455dd646369 service nova] Lock "14a54dac-d2b8-4618-86c8-ab2d08bae005-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1945.959757] env[62510]: DEBUG nova.compute.manager [req-9c3ba436-a91e-4051-906b-52405bd2e650 req-c54803ff-f0f7-44b9-b084-a455dd646369 service nova] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] No waiting events found dispatching network-vif-plugged-9015bc32-b9ad-4846-a019-0a10e61e5218 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1945.959757] env[62510]: WARNING nova.compute.manager [req-9c3ba436-a91e-4051-906b-52405bd2e650 req-c54803ff-f0f7-44b9-b084-a455dd646369 service nova] [instance: 
14a54dac-d2b8-4618-86c8-ab2d08bae005] Received unexpected event network-vif-plugged-9015bc32-b9ad-4846-a019-0a10e61e5218 for instance with vm_state building and task_state spawning. [ 1945.968803] env[62510]: DEBUG oslo_concurrency.lockutils [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1945.969056] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1945.969322] env[62510]: DEBUG oslo_concurrency.lockutils [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1945.969457] env[62510]: DEBUG oslo_concurrency.lockutils [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1945.969641] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1945.970812] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "refresh_cache-14a54dac-d2b8-4618-86c8-ab2d08bae005" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1945.970928] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquired lock "refresh_cache-14a54dac-d2b8-4618-86c8-ab2d08bae005" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1945.971083] env[62510]: DEBUG nova.network.neutron [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1945.972284] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2995a18e-b27f-4cf6-ae03-eaf070500f33 {{(pid=62510) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.984453] env[62510]: DEBUG oslo_vmware.api [None req-628f6381-348f-4b6f-abc6-04063a2ec75f tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769609, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.654187} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1945.992751] env[62510]: DEBUG nova.virt.hardware [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1945.992986] env[62510]: DEBUG nova.virt.hardware [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1945.993158] env[62510]: DEBUG nova.virt.hardware [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1945.993344] env[62510]: DEBUG nova.virt.hardware [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1945.993488] env[62510]: DEBUG nova.virt.hardware [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1945.993630] env[62510]: DEBUG nova.virt.hardware [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1945.993836] env[62510]: DEBUG nova.virt.hardware [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1945.993993] env[62510]: DEBUG nova.virt.hardware [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1945.994184] env[62510]: DEBUG nova.virt.hardware [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1945.994342] env[62510]: DEBUG nova.virt.hardware [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1945.994520] env[62510]: DEBUG nova.virt.hardware [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1945.997434] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-628f6381-348f-4b6f-abc6-04063a2ec75f tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1945.997627] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-628f6381-348f-4b6f-abc6-04063a2ec75f tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1945.997801] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-628f6381-348f-4b6f-abc6-04063a2ec75f tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1945.997969] env[62510]: INFO nova.compute.manager [None req-628f6381-348f-4b6f-abc6-04063a2ec75f tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Took 5.44 seconds to destroy the instance on the hypervisor. [ 1945.998249] env[62510]: DEBUG oslo.service.loopingcall [None req-628f6381-348f-4b6f-abc6-04063a2ec75f tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1945.998752] env[62510]: DEBUG oslo_vmware.api [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769610, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.616824} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1945.998961] env[62510]: DEBUG oslo_vmware.api [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769611, 'name': Rename_Task, 'duration_secs': 0.244259} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1945.999207] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1945.999370] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1946.000768] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da185cc8-260a-4dc6-a6fe-bb4b6e7aef39 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.003262] env[62510]: DEBUG oslo_vmware.api [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769612, 'name': ReconfigVM_Task, 'duration_secs': 0.258686} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1946.003486] env[62510]: DEBUG nova.compute.manager [-] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1946.003565] env[62510]: DEBUG nova.network.neutron [-] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1946.005540] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1946.005728] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1946.005899] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1946.008303] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 
22002fc1-647e-4e65-a5f0-c3a34575985f] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1946.008514] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79ab0f28-1ab9-4cd3-9dac-c87f6a91a84c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.010907] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1946.013184] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-76a03600-6901-4542-840c-d8dcc1ac15f6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.014930] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9861cce5-8a3f-42d2-9a7e-0a37c2c9350d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.023206] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f150982-c886-44b5-a264-08d4c4a43f5f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.028019] env[62510]: DEBUG oslo_vmware.api [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Waiting for the task: (returnval){ [ 1946.028019] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52d5dd55-7837-6b02-2d1c-08137e4a093d" [ 1946.028019] env[62510]: _type = "Task" [ 1946.028019] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1946.030606] env[62510]: DEBUG oslo_vmware.api [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for the task: (returnval){ [ 1946.030606] env[62510]: value = "task-1769614" [ 1946.030606] env[62510]: _type = "Task" [ 1946.030606] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1946.030848] env[62510]: DEBUG oslo_vmware.api [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 1946.030848] env[62510]: value = "task-1769613" [ 1946.030848] env[62510]: _type = "Task" [ 1946.030848] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1946.054797] env[62510]: DEBUG oslo_vmware.api [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52d5dd55-7837-6b02-2d1c-08137e4a093d, 'name': SearchDatastore_Task, 'duration_secs': 0.018423} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1946.062888] env[62510]: DEBUG oslo_vmware.api [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769613, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.062888] env[62510]: DEBUG oslo_vmware.api [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769614, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.062888] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bfb3ecc8-8e87-4d79-8fcc-bb286207baba {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.068598] env[62510]: DEBUG oslo_vmware.api [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Waiting for the task: (returnval){ [ 1946.068598] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]526281aa-c3e1-d158-cb73-dc070dde2d56" [ 1946.068598] env[62510]: _type = "Task" [ 1946.068598] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1946.077265] env[62510]: DEBUG oslo_vmware.api [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]526281aa-c3e1-d158-cb73-dc070dde2d56, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.336777] env[62510]: DEBUG nova.network.neutron [req-c6eaba48-37a5-42db-8306-cafed677ff9c req-e14533f1-ac5d-4f65-8a72-f31d6d13aadf service nova] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Updated VIF entry in instance network info cache for port 405ea0bb-7824-446f-8b19-9d455a30b449. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1946.337171] env[62510]: DEBUG nova.network.neutron [req-c6eaba48-37a5-42db-8306-cafed677ff9c req-e14533f1-ac5d-4f65-8a72-f31d6d13aadf service nova] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Updating instance_info_cache with network_info: [{"id": "405ea0bb-7824-446f-8b19-9d455a30b449", "address": "fa:16:3e:7f:82:39", "network": {"id": "9b209a99-520e-436f-be97-fe37ae505518", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1482163995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86abf24d608d4c438161dc0b8335dea1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap405ea0bb-78", "ovs_interfaceid": "405ea0bb-7824-446f-8b19-9d455a30b449", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1946.534348] env[62510]: DEBUG nova.network.neutron [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1946.553117] env[62510]: DEBUG oslo_vmware.api [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769613, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.557808] env[62510]: DEBUG oslo_vmware.api [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769614, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.580969] env[62510]: DEBUG oslo_vmware.api [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]526281aa-c3e1-d158-cb73-dc070dde2d56, 'name': SearchDatastore_Task, 'duration_secs': 0.02015} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1946.581369] env[62510]: DEBUG oslo_concurrency.lockutils [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1946.581527] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 7b2bcec4-6df7-4591-ac02-9da04d185756/7b2bcec4-6df7-4591-ac02-9da04d185756.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1946.581866] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fddc16af-7d64-43c9-8a15-119a441d03e5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.590636] env[62510]: DEBUG oslo_vmware.api [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Waiting for the task: (returnval){ [ 1946.590636] env[62510]: value = "task-1769615" [ 1946.590636] env[62510]: _type = "Task" [ 1946.590636] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1946.602265] env[62510]: DEBUG oslo_vmware.api [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769615, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.822049] env[62510]: DEBUG nova.network.neutron [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Updating instance_info_cache with network_info: [{"id": "9015bc32-b9ad-4846-a019-0a10e61e5218", "address": "fa:16:3e:1a:36:04", "network": {"id": "de9186ec-ac4f-4ac0-8499-037f92e28197", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-164983974-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f878b652f01c48139bfc6996e5e32f5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "191a5351-07d5-4138-b855-206f48fc4375", "external-id": "nsx-vlan-transportzone-939", "segmentation_id": 939, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9015bc32-b9", "ovs_interfaceid": "9015bc32-b9ad-4846-a019-0a10e61e5218", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1946.841668] env[62510]: DEBUG oslo_concurrency.lockutils [req-c6eaba48-37a5-42db-8306-cafed677ff9c req-e14533f1-ac5d-4f65-8a72-f31d6d13aadf service nova] Releasing lock "refresh_cache-fe3b3380-69bb-4563-abf2-9f0db439d31a" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1947.056600] env[62510]: DEBUG oslo_vmware.api [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769614, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.058963] env[62510]: DEBUG nova.virt.hardware [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1947.059239] env[62510]: DEBUG nova.virt.hardware [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1947.059427] env[62510]: DEBUG nova.virt.hardware [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1947.059629] env[62510]: DEBUG nova.virt.hardware [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1947.059781] env[62510]: DEBUG nova.virt.hardware [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1947.059927] env[62510]: DEBUG nova.virt.hardware [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1947.060150] env[62510]: DEBUG nova.virt.hardware [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1947.060314] env[62510]: DEBUG nova.virt.hardware [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1947.060509] env[62510]: DEBUG nova.virt.hardware [None 
req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1947.060679] env[62510]: DEBUG nova.virt.hardware [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1947.060849] env[62510]: DEBUG nova.virt.hardware [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1947.064578] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd8f7ca6-82a5-4b88-b7c6-7510cee5295a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.067326] env[62510]: DEBUG oslo_vmware.api [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769613, 'name': PowerOnVM_Task, 'duration_secs': 0.570611} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1947.067582] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1947.068224] env[62510]: INFO nova.compute.manager [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Took 15.34 seconds to spawn the instance on the hypervisor. 
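The entries above all follow two idioms: access to the cached image VMDK is serialized with an oslo.concurrency lock named after the cache path ("Acquiring/Acquired/Releasing lock '[datastore1] devstack-image-cache_base/…'"), and every vSphere operation is started as a *_Task method and then polled until vCenter reports completion ("Waiting for the task … progress is N% … completed successfully"). The sketch below illustrates both idioms only; it is not Nova's actual implementation. It assumes an already-created oslo_vmware.api.VMwareAPISession named `session` and a VirtualMachine managed-object reference `vm_ref` obtained elsewhere; the helper name `power_on_under_cache_lock` and the lock-name string are illustrative.

    # Illustrative sketch, not Nova's implementation. Assumes `session` is an
    # existing oslo_vmware.api.VMwareAPISession and `vm_ref` is a VirtualMachine
    # managed-object reference obtained elsewhere.
    from oslo_concurrency import lockutils


    def power_on_under_cache_lock(session, vm_ref, image_id,
                                  datastore_name="datastore1"):
        # Same lock-name convention as the "[datastore1]
        # devstack-image-cache_base/<image-id>" entries in the log: work that
        # touches the cached image is serialized on this name.
        cache_lock = "[%s] devstack-image-cache_base/%s" % (datastore_name,
                                                            image_id)
        with lockutils.lock(cache_lock):
            # Cache lookup and the CopyVirtualDisk_Task / ExtendVirtualDisk_Task
            # steps seen in the log would run here while the lock is held.
            pass

        # Start the vSphere operation as a *_Task method...
        task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
        # ...then block while oslo.vmware polls it; this polling is what emits
        # the "progress is N% ... completed successfully" lines above.
        session.wait_for_task(task)

In the log, the same invoke-then-wait shape appears for CreateVM_Task, ReconfigVM_Task, Rename_Task, SearchDatastore_Task and PowerOnVM_Task; only the method name and arguments change.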
[ 1947.068224] env[62510]: DEBUG nova.compute.manager [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1947.069253] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-176e0e1e-96c1-4668-8763-adaeb59abaa6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.076308] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a126b4dc-3d9c-4478-acef-e958f96eb7ee {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.105035] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:31:42:53', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73b1ea51-8078-4169-921e-d5a224120ab4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'aa1b717d-79b9-457c-829a-a4e12f0187c4', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1947.113564] env[62510]: DEBUG oslo.service.loopingcall [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1947.114594] env[62510]: DEBUG nova.network.neutron [-] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1947.116009] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1947.116762] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-95398c0b-5048-4edc-8a55-83e7dc7f995b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.135363] env[62510]: DEBUG oslo_vmware.api [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769615, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.143610] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1947.143610] env[62510]: value = "task-1769617" [ 1947.143610] env[62510]: _type = "Task" [ 1947.143610] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1947.152651] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769617, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.324814] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Releasing lock "refresh_cache-14a54dac-d2b8-4618-86c8-ab2d08bae005" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1947.325325] env[62510]: DEBUG nova.compute.manager [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Instance network_info: |[{"id": "9015bc32-b9ad-4846-a019-0a10e61e5218", "address": "fa:16:3e:1a:36:04", "network": {"id": "de9186ec-ac4f-4ac0-8499-037f92e28197", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-164983974-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f878b652f01c48139bfc6996e5e32f5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "191a5351-07d5-4138-b855-206f48fc4375", "external-id": "nsx-vlan-transportzone-939", "segmentation_id": 939, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9015bc32-b9", "ovs_interfaceid": "9015bc32-b9ad-4846-a019-0a10e61e5218", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1947.325607] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1a:36:04', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '191a5351-07d5-4138-b855-206f48fc4375', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9015bc32-b9ad-4846-a019-0a10e61e5218', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1947.333052] env[62510]: DEBUG oslo.service.loopingcall [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1947.333305] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1947.333570] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-35d1b500-f7f1-45b2-b325-96913780c662 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.354380] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1947.354380] env[62510]: value = "task-1769618" [ 1947.354380] env[62510]: _type = "Task" [ 1947.354380] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1947.362361] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769618, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.550169] env[62510]: DEBUG oslo_vmware.api [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769614, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.599013] env[62510]: INFO nova.compute.manager [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Took 27.15 seconds to build instance. [ 1947.604223] env[62510]: DEBUG oslo_vmware.api [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769615, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.688991} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1947.604346] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 7b2bcec4-6df7-4591-ac02-9da04d185756/7b2bcec4-6df7-4591-ac02-9da04d185756.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1947.604489] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1947.604778] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-00dba9cb-a5e8-4840-8e52-13b45ee20879 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.612401] env[62510]: DEBUG oslo_vmware.api [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Waiting for the task: (returnval){ [ 1947.612401] env[62510]: value = "task-1769619" [ 1947.612401] env[62510]: _type = "Task" [ 1947.612401] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1947.620197] env[62510]: INFO nova.compute.manager [-] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Took 1.62 seconds to deallocate network for instance. [ 1947.620768] env[62510]: DEBUG oslo_vmware.api [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769619, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.653584] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769617, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.697383] env[62510]: DEBUG nova.compute.manager [req-68895e08-d747-432b-b294-e7256b64e344 req-5d678bc4-13fa-40d4-8d1e-d5f07f9934c2 service nova] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Received event network-changed-9015bc32-b9ad-4846-a019-0a10e61e5218 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1947.697619] env[62510]: DEBUG nova.compute.manager [req-68895e08-d747-432b-b294-e7256b64e344 req-5d678bc4-13fa-40d4-8d1e-d5f07f9934c2 service nova] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Refreshing instance network info cache due to event network-changed-9015bc32-b9ad-4846-a019-0a10e61e5218. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1947.697836] env[62510]: DEBUG oslo_concurrency.lockutils [req-68895e08-d747-432b-b294-e7256b64e344 req-5d678bc4-13fa-40d4-8d1e-d5f07f9934c2 service nova] Acquiring lock "refresh_cache-14a54dac-d2b8-4618-86c8-ab2d08bae005" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1947.697973] env[62510]: DEBUG oslo_concurrency.lockutils [req-68895e08-d747-432b-b294-e7256b64e344 req-5d678bc4-13fa-40d4-8d1e-d5f07f9934c2 service nova] Acquired lock "refresh_cache-14a54dac-d2b8-4618-86c8-ab2d08bae005" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1947.698232] env[62510]: DEBUG nova.network.neutron [req-68895e08-d747-432b-b294-e7256b64e344 req-5d678bc4-13fa-40d4-8d1e-d5f07f9934c2 service nova] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Refreshing network info cache for port 9015bc32-b9ad-4846-a019-0a10e61e5218 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1947.865614] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769618, 'name': CreateVM_Task} progress is 25%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1948.048961] env[62510]: DEBUG oslo_vmware.api [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769614, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1948.101894] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5b105d0c-7939-4a14-ade8-4fb356ee5243 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "22002fc1-647e-4e65-a5f0-c3a34575985f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.665s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1948.123405] env[62510]: DEBUG oslo_vmware.api [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769619, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.149467} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1948.123405] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1948.124278] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d110f94-0874-40ef-aa14-a15a83f63eae {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.128701] env[62510]: DEBUG oslo_concurrency.lockutils [None req-628f6381-348f-4b6f-abc6-04063a2ec75f tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1948.128937] env[62510]: DEBUG oslo_concurrency.lockutils [None req-628f6381-348f-4b6f-abc6-04063a2ec75f tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1948.129220] env[62510]: DEBUG nova.objects.instance [None req-628f6381-348f-4b6f-abc6-04063a2ec75f tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lazy-loading 'resources' on Instance uuid 72f8492b-304a-4451-ab40-4cdfe36b9e19 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1948.148887] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] 7b2bcec4-6df7-4591-ac02-9da04d185756/7b2bcec4-6df7-4591-ac02-9da04d185756.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1948.149941] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f301578-2b4c-4003-95e3-c3be30c0753b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.174216] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769617, 'name': CreateVM_Task, 'duration_secs': 0.720043} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1948.175470] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1948.176099] env[62510]: DEBUG oslo_vmware.api [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Waiting for the task: (returnval){ [ 1948.176099] env[62510]: value = "task-1769620" [ 1948.176099] env[62510]: _type = "Task" [ 1948.176099] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1948.177354] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1948.177354] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1948.177354] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1948.177534] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f3689df-b53e-4ef1-8645-e81fd41c9c1d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.186145] env[62510]: DEBUG oslo_vmware.api [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 1948.186145] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]522d970d-6f7d-b9fa-6306-ab048e18f14a" [ 1948.186145] env[62510]: _type = "Task" [ 1948.186145] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1948.189471] env[62510]: DEBUG oslo_vmware.api [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769620, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1948.198331] env[62510]: DEBUG oslo_vmware.api [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]522d970d-6f7d-b9fa-6306-ab048e18f14a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1948.365919] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769618, 'name': CreateVM_Task, 'duration_secs': 0.846627} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1948.366071] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1948.366712] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1948.454725] env[62510]: DEBUG nova.network.neutron [req-68895e08-d747-432b-b294-e7256b64e344 req-5d678bc4-13fa-40d4-8d1e-d5f07f9934c2 service nova] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Updated VIF entry in instance network info cache for port 9015bc32-b9ad-4846-a019-0a10e61e5218. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1948.455101] env[62510]: DEBUG nova.network.neutron [req-68895e08-d747-432b-b294-e7256b64e344 req-5d678bc4-13fa-40d4-8d1e-d5f07f9934c2 service nova] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Updating instance_info_cache with network_info: [{"id": "9015bc32-b9ad-4846-a019-0a10e61e5218", "address": "fa:16:3e:1a:36:04", "network": {"id": "de9186ec-ac4f-4ac0-8499-037f92e28197", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-164983974-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f878b652f01c48139bfc6996e5e32f5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "191a5351-07d5-4138-b855-206f48fc4375", "external-id": "nsx-vlan-transportzone-939", "segmentation_id": 939, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9015bc32-b9", "ovs_interfaceid": "9015bc32-b9ad-4846-a019-0a10e61e5218", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1948.549431] env[62510]: DEBUG oslo_vmware.api [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769614, 'name': PowerOnVM_Task, 'duration_secs': 2.440937} completed successfully. 
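[editorial note] The network_info blob cached above is a list of VIF dicts; most consumers only need the port id, MAC and IPs buried inside it. A small illustrative helper for pulling those out of one entry (the dict layout follows the structure shown in the log; the helper itself is not Nova code):

    def summarize_vif(vif):
        # Collect fixed and floating IPs from network.subnets[].ips[].
        fixed, floating = [], []
        for subnet in vif.get('network', {}).get('subnets', []):
            for ip in subnet.get('ips', []):
                fixed.append(ip['address'])
                floating.extend(f['address'] for f in ip.get('floating_ips', []))
        return {
            'port_id': vif.get('id'),
            'mac': vif.get('address'),
            'devname': vif.get('devname'),
            'fixed_ips': fixed,
            'floating_ips': floating,
        }

For the entry cached above it would report port 9015bc32-b9ad-4846-a019-0a10e61e5218 on MAC fa:16:3e:1a:36:04 with fixed IP 192.168.128.13 and no floating IPs.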
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1948.549719] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1948.552538] env[62510]: DEBUG nova.compute.manager [None req-5b27817f-2f73-4038-b0ce-4250fadc7759 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1948.553307] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04aa5295-67ac-451d-9665-03232b15fda5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.691213] env[62510]: DEBUG oslo_vmware.api [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769620, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1948.702926] env[62510]: DEBUG oslo_vmware.api [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]522d970d-6f7d-b9fa-6306-ab048e18f14a, 'name': SearchDatastore_Task, 'duration_secs': 0.059199} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1948.703269] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1948.703519] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1948.703755] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1948.703902] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1948.704099] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1948.704457] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1948.704769] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1948.705006] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2bc748b8-6f83-4614-93f8-5f0fc94e1694 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.707147] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1ef37ad-0c78-4663-b98e-395f1b215cc6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.713811] env[62510]: DEBUG oslo_vmware.api [None 
req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1948.713811] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]525c0739-066a-23b0-292a-143adb145633" [ 1948.713811] env[62510]: _type = "Task" [ 1948.713811] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1948.720550] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1948.720757] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1948.721978] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f74a512-7188-47d9-8d3e-0698121b539d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.727993] env[62510]: DEBUG oslo_vmware.api [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]525c0739-066a-23b0-292a-143adb145633, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1948.732510] env[62510]: DEBUG oslo_vmware.api [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 1948.732510] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52da0711-c137-9675-0f30-1b967c7fafc8" [ 1948.732510] env[62510]: _type = "Task" [ 1948.732510] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1948.743890] env[62510]: DEBUG oslo_vmware.api [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52da0711-c137-9675-0f30-1b967c7fafc8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1948.747274] env[62510]: DEBUG nova.compute.manager [req-096e15d8-4e05-4800-b3d0-ef09e05551dd req-867a5f31-f13a-4cda-b799-7d9025e9b9b8 service nova] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Received event network-changed-cc8e6d9b-23a8-4a82-bce2-858b46a9cf25 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1948.747274] env[62510]: DEBUG nova.compute.manager [req-096e15d8-4e05-4800-b3d0-ef09e05551dd req-867a5f31-f13a-4cda-b799-7d9025e9b9b8 service nova] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Refreshing instance network info cache due to event network-changed-cc8e6d9b-23a8-4a82-bce2-858b46a9cf25. 
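[editorial note] The SearchDatastore/MakeDirectory exchange above is the "fetch the base image into the cache only if it is missing" step, serialized by a lock on the cached vmdk path. A local-filesystem sketch of the same idea, where download is a hypothetical callable that writes the image to a path (the real code works against a vSphere datastore instead):

    import os
    import threading

    _cache_lock = threading.Lock()

    def fetch_image_if_missing(cache_dir, image_id, download, log=print):
        # Ensure the cached base image exists exactly once before cloning it.
        cached = os.path.join(cache_dir, image_id, image_id + '.vmdk')
        with _cache_lock:
            if os.path.exists(cached):
                log("Image %s already cached" % image_id)
                return cached
            os.makedirs(os.path.dirname(cached), exist_ok=True)
            log("Created directory with path %s" % os.path.dirname(cached))
            download(cached)
        return cached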
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1948.747274] env[62510]: DEBUG oslo_concurrency.lockutils [req-096e15d8-4e05-4800-b3d0-ef09e05551dd req-867a5f31-f13a-4cda-b799-7d9025e9b9b8 service nova] Acquiring lock "refresh_cache-22002fc1-647e-4e65-a5f0-c3a34575985f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1948.747486] env[62510]: DEBUG oslo_concurrency.lockutils [req-096e15d8-4e05-4800-b3d0-ef09e05551dd req-867a5f31-f13a-4cda-b799-7d9025e9b9b8 service nova] Acquired lock "refresh_cache-22002fc1-647e-4e65-a5f0-c3a34575985f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1948.747524] env[62510]: DEBUG nova.network.neutron [req-096e15d8-4e05-4800-b3d0-ef09e05551dd req-867a5f31-f13a-4cda-b799-7d9025e9b9b8 service nova] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Refreshing network info cache for port cc8e6d9b-23a8-4a82-bce2-858b46a9cf25 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1948.921137] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33387e66-c923-4aa4-b61d-098438eb758e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.931979] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a368aece-0c0c-45b1-a385-048bfa039ba6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.968866] env[62510]: DEBUG oslo_concurrency.lockutils [req-68895e08-d747-432b-b294-e7256b64e344 req-5d678bc4-13fa-40d4-8d1e-d5f07f9934c2 service nova] Releasing lock "refresh_cache-14a54dac-d2b8-4618-86c8-ab2d08bae005" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1948.969328] env[62510]: DEBUG nova.compute.manager [req-68895e08-d747-432b-b294-e7256b64e344 req-5d678bc4-13fa-40d4-8d1e-d5f07f9934c2 service nova] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Received event network-vif-deleted-348ebdec-3667-4eea-b76e-5356163db2f9 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1948.970487] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-222a87e3-49e2-433f-90e9-4e6822da22fc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.979985] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18443f71-dcdc-4f15-8530-2e2e82425ccd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.996465] env[62510]: DEBUG nova.compute.provider_tree [None req-628f6381-348f-4b6f-abc6-04063a2ec75f tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} 
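[editorial note] A network-changed event, as received above, is handled by re-reading the port from the network service under a per-instance refresh lock and overwriting the cached entry. A rough sketch of that flow, with fetch_port_info standing in for the Neutron lookup (all names here are illustrative):

    import threading

    _cache = {}
    _cache_locks = {}
    _guard = threading.Lock()

    def handle_network_changed(instance_uuid, port_id, fetch_port_info, log=print):
        # Serialize refreshes per instance, then replace the cached VIF entry.
        with _guard:
            lock = _cache_locks.setdefault(instance_uuid, threading.Lock())
        with lock:  # corresponds to the "refresh_cache-<uuid>" lock in the log
            log("Refreshing network info cache for port %s" % port_id)
            _cache.setdefault(instance_uuid, {})[port_id] = fetch_port_info(port_id)
            log("Updated VIF entry for port %s" % port_id)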
{{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1949.189464] env[62510]: DEBUG oslo_vmware.api [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769620, 'name': ReconfigVM_Task, 'duration_secs': 0.785069} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1949.189805] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Reconfigured VM instance instance-00000073 to attach disk [datastore1] 7b2bcec4-6df7-4591-ac02-9da04d185756/7b2bcec4-6df7-4591-ac02-9da04d185756.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1949.190658] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-02f92b1c-70c0-4143-ad1b-0c8d142062c8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.199511] env[62510]: DEBUG oslo_vmware.api [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Waiting for the task: (returnval){ [ 1949.199511] env[62510]: value = "task-1769621" [ 1949.199511] env[62510]: _type = "Task" [ 1949.199511] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1949.208591] env[62510]: DEBUG oslo_vmware.api [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769621, 'name': Rename_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.228024] env[62510]: DEBUG oslo_vmware.api [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]525c0739-066a-23b0-292a-143adb145633, 'name': SearchDatastore_Task, 'duration_secs': 0.013718} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1949.228024] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1949.228299] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1949.228522] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1949.245145] env[62510]: DEBUG oslo_vmware.api [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52da0711-c137-9675-0f30-1b967c7fafc8, 'name': SearchDatastore_Task, 'duration_secs': 0.015948} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1949.246518] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8866d22-30a4-4b8d-b7db-99e35c3e5c09 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.257695] env[62510]: DEBUG oslo_vmware.api [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 1949.257695] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52e33c37-c949-42df-277e-792630f6cfba" [ 1949.257695] env[62510]: _type = "Task" [ 1949.257695] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1949.268492] env[62510]: DEBUG oslo_vmware.api [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52e33c37-c949-42df-277e-792630f6cfba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.514058] env[62510]: DEBUG nova.network.neutron [req-096e15d8-4e05-4800-b3d0-ef09e05551dd req-867a5f31-f13a-4cda-b799-7d9025e9b9b8 service nova] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Updated VIF entry in instance network info cache for port cc8e6d9b-23a8-4a82-bce2-858b46a9cf25. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1949.514436] env[62510]: DEBUG nova.network.neutron [req-096e15d8-4e05-4800-b3d0-ef09e05551dd req-867a5f31-f13a-4cda-b799-7d9025e9b9b8 service nova] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Updating instance_info_cache with network_info: [{"id": "cc8e6d9b-23a8-4a82-bce2-858b46a9cf25", "address": "fa:16:3e:28:36:9e", "network": {"id": "4c55d05c-607e-4972-898f-4aacefeddfdb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1391357384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bae4f0adee8c4c28add1849316448538", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dced2f3d-7fd3-4a42-836d-9f02dab4c949", "external-id": "nsx-vlan-transportzone-117", "segmentation_id": 117, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc8e6d9b-23", "ovs_interfaceid": "cc8e6d9b-23a8-4a82-bce2-858b46a9cf25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1949.536559] env[62510]: DEBUG nova.scheduler.client.report [None req-628f6381-348f-4b6f-abc6-04063a2ec75f tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Updated inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with generation 157 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1949.536898] env[62510]: DEBUG nova.compute.provider_tree [None req-628f6381-348f-4b6f-abc6-04063a2ec75f tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Updating resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 generation from 157 to 158 during operation: update_inventory {{(pid=62510) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1949.537535] env[62510]: DEBUG nova.compute.provider_tree [None req-628f6381-348f-4b6f-abc6-04063a2ec75f tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1949.710815] env[62510]: DEBUG oslo_vmware.api [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769621, 'name': Rename_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.768191] env[62510]: DEBUG oslo_vmware.api [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52e33c37-c949-42df-277e-792630f6cfba, 'name': SearchDatastore_Task, 'duration_secs': 0.040035} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1949.768503] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1949.768759] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 9956e5d2-edda-47af-a3df-743ebed1154b/9956e5d2-edda-47af-a3df-743ebed1154b.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1949.769046] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1949.769236] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1949.769486] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b341ae06-7214-406c-bd09-dcfb05c1a7f1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.771433] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f3cc893b-5321-4f62-bee5-275cb34a86ac {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.780768] env[62510]: DEBUG oslo_vmware.api [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 1949.780768] env[62510]: value = "task-1769623" [ 1949.780768] env[62510]: _type = "Task" [ 1949.780768] 
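[editorial note] The inventory dict reported to Placement above fixes, per resource class, the totals, reservations and allocation ratios the scheduler works with; the capacity it can actually place against is (total - reserved) * allocation_ratio. A quick check with the numbers from the log:

    def effective_capacity(inventory):
        # Capacity available for allocations, per resource class.
        return {rc: int((spec['total'] - spec['reserved']) * spec['allocation_ratio'])
                for rc, spec in inventory.items()}

    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165,
                    'step_size': 1, 'allocation_ratio': 1.0},
    }
    print(effective_capacity(inventory))
    # {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}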
env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1949.788189] env[62510]: DEBUG oslo_vmware.api [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769623, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.790139] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1949.790316] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1949.790978] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d824e01e-d195-4731-89e9-a8458113ecc5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.796523] env[62510]: DEBUG oslo_vmware.api [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1949.796523] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5287beaa-47b5-59e9-d4ab-4ae89f0d894c" [ 1949.796523] env[62510]: _type = "Task" [ 1949.796523] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1949.803739] env[62510]: DEBUG oslo_vmware.api [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5287beaa-47b5-59e9-d4ab-4ae89f0d894c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.020213] env[62510]: DEBUG oslo_concurrency.lockutils [req-096e15d8-4e05-4800-b3d0-ef09e05551dd req-867a5f31-f13a-4cda-b799-7d9025e9b9b8 service nova] Releasing lock "refresh_cache-22002fc1-647e-4e65-a5f0-c3a34575985f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1950.044370] env[62510]: DEBUG oslo_concurrency.lockutils [None req-628f6381-348f-4b6f-abc6-04063a2ec75f tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.915s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1950.052972] env[62510]: DEBUG oslo_concurrency.lockutils [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Acquiring lock "abf15987-86cc-4fdc-be9a-efd0448ce9ca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1950.053259] env[62510]: DEBUG oslo_concurrency.lockutils [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Lock "abf15987-86cc-4fdc-be9a-efd0448ce9ca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1950.064415] env[62510]: INFO nova.scheduler.client.report [None req-628f6381-348f-4b6f-abc6-04063a2ec75f tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Deleted allocations for instance 72f8492b-304a-4451-ab40-4cdfe36b9e19 [ 1950.210991] env[62510]: DEBUG oslo_vmware.api [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769621, 'name': Rename_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.290704] env[62510]: DEBUG oslo_vmware.api [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769623, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.305407] env[62510]: DEBUG oslo_vmware.api [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5287beaa-47b5-59e9-d4ab-4ae89f0d894c, 'name': SearchDatastore_Task, 'duration_secs': 0.017922} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1950.306162] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e43e6cbb-569b-4bb1-9353-4fa076235030 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.311088] env[62510]: DEBUG oslo_vmware.api [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1950.311088] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]525d5579-07d9-771f-c3e0-4118bb42886e" [ 1950.311088] env[62510]: _type = "Task" [ 1950.311088] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1950.318560] env[62510]: DEBUG oslo_vmware.api [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]525d5579-07d9-771f-c3e0-4118bb42886e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.555871] env[62510]: DEBUG nova.compute.manager [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1950.570794] env[62510]: DEBUG oslo_concurrency.lockutils [None req-628f6381-348f-4b6f-abc6-04063a2ec75f tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "72f8492b-304a-4451-ab40-4cdfe36b9e19" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.523s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1950.712201] env[62510]: DEBUG oslo_vmware.api [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769621, 'name': Rename_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.791936] env[62510]: DEBUG oslo_vmware.api [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769623, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.821076] env[62510]: DEBUG oslo_vmware.api [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]525d5579-07d9-771f-c3e0-4118bb42886e, 'name': SearchDatastore_Task, 'duration_secs': 0.070643} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1950.821251] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1950.821404] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 14a54dac-d2b8-4618-86c8-ab2d08bae005/14a54dac-d2b8-4618-86c8-ab2d08bae005.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1950.821636] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d162dd90-d4c4-4aff-a5bd-c7338d5193e5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.828882] env[62510]: DEBUG oslo_vmware.api [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1950.828882] env[62510]: value = "task-1769624" [ 1950.828882] env[62510]: _type = "Task" [ 1950.828882] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1950.837671] env[62510]: DEBUG oslo_vmware.api [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769624, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1951.079034] env[62510]: DEBUG oslo_concurrency.lockutils [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1951.079357] env[62510]: DEBUG oslo_concurrency.lockutils [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1951.080917] env[62510]: INFO nova.compute.claims [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1951.215435] env[62510]: DEBUG oslo_vmware.api [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769621, 'name': Rename_Task, 'duration_secs': 1.567022} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1951.215789] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1951.216152] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4e350bdb-5561-4531-ad7d-46b830b6af58 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.223971] env[62510]: DEBUG oslo_vmware.api [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Waiting for the task: (returnval){ [ 1951.223971] env[62510]: value = "task-1769625" [ 1951.223971] env[62510]: _type = "Task" [ 1951.223971] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1951.237520] env[62510]: DEBUG oslo_vmware.api [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769625, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1951.294107] env[62510]: DEBUG oslo_vmware.api [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769623, 'name': CopyVirtualDisk_Task} progress is 25%. 
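[editorial note] The claim above is made with the node's "compute_resources" lock held, so concurrent builds cannot both see the same free capacity. A toy version of that bookkeeping, not the real nova ResourceTracker:

    import threading

    class ToyResourceTracker:
        def __init__(self, vcpus, memory_mb):
            self._lock = threading.Lock()  # plays the role of "compute_resources"
            self.free = {'vcpus': vcpus, 'memory_mb': memory_mb}

        def instance_claim(self, instance_uuid, vcpus, memory_mb):
            # Check and deduct under one lock so the claim is atomic.
            with self._lock:
                if vcpus > self.free['vcpus'] or memory_mb > self.free['memory_mb']:
                    raise RuntimeError("insufficient resources for %s" % instance_uuid)
                self.free['vcpus'] -= vcpus
                self.free['memory_mb'] -= memory_mb
                print("Claim successful for %s" % instance_uuid)

        def update_usage(self, instance_uuid, vcpus, memory_mb):
            # Return capacity when an instance is deleted (cf. the update_usage
            # lock acquisitions earlier in the log).
            with self._lock:
                self.free['vcpus'] += vcpus
                self.free['memory_mb'] += memory_mb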
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1951.339545] env[62510]: DEBUG oslo_vmware.api [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769624, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1951.734425] env[62510]: DEBUG oslo_vmware.api [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769625, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1951.744074] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8e1be2b0-25b3-4457-8e08-7d6c71d19aa4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1951.744473] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8e1be2b0-25b3-4457-8e08-7d6c71d19aa4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1951.745180] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8e1be2b0-25b3-4457-8e08-7d6c71d19aa4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "0f6e9363-47ac-481e-bc1c-b8f4f9748d9c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1951.745467] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8e1be2b0-25b3-4457-8e08-7d6c71d19aa4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "0f6e9363-47ac-481e-bc1c-b8f4f9748d9c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1951.745701] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8e1be2b0-25b3-4457-8e08-7d6c71d19aa4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "0f6e9363-47ac-481e-bc1c-b8f4f9748d9c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1951.749082] env[62510]: INFO nova.compute.manager [None req-8e1be2b0-25b3-4457-8e08-7d6c71d19aa4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Terminating instance [ 1951.798017] env[62510]: DEBUG oslo_vmware.api [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 
tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769623, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.904301} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1951.798479] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 9956e5d2-edda-47af-a3df-743ebed1154b/9956e5d2-edda-47af-a3df-743ebed1154b.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1951.798765] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1951.799159] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-10d4e124-ddf7-4c5f-a611-79fa33503f1d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.807647] env[62510]: DEBUG oslo_vmware.api [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 1951.807647] env[62510]: value = "task-1769626" [ 1951.807647] env[62510]: _type = "Task" [ 1951.807647] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1951.821052] env[62510]: DEBUG oslo_vmware.api [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769626, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1951.839493] env[62510]: DEBUG oslo_vmware.api [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769624, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.238380] env[62510]: DEBUG oslo_vmware.api [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769625, 'name': PowerOnVM_Task} progress is 71%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.255676] env[62510]: DEBUG nova.compute.manager [None req-8e1be2b0-25b3-4457-8e08-7d6c71d19aa4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1952.255948] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8e1be2b0-25b3-4457-8e08-7d6c71d19aa4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1952.257158] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9fa606c-6edf-4fdc-959a-a82f178299c9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.265279] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e1be2b0-25b3-4457-8e08-7d6c71d19aa4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1952.267787] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-baa8b100-e55e-4991-9ac0-f0ffa9869406 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.275891] env[62510]: DEBUG oslo_vmware.api [None req-8e1be2b0-25b3-4457-8e08-7d6c71d19aa4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1952.275891] env[62510]: value = "task-1769627" [ 1952.275891] env[62510]: _type = "Task" [ 1952.275891] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1952.284950] env[62510]: DEBUG oslo_vmware.api [None req-8e1be2b0-25b3-4457-8e08-7d6c71d19aa4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769627, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.318475] env[62510]: DEBUG oslo_vmware.api [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769626, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.185728} completed successfully. 
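[editorial note] Terminating an instance, as it starts above, follows a fixed order on the hypervisor: power the VM off, unregister it, then clean up its files. A sketch of that ordering with hypothetical callables standing in for the corresponding vSphere tasks:

    def destroy_instance(power_off, unregister, delete_files, log=print):
        # Power off first; a VM that is already off is tolerated and the
        # destroy continues with unregister and file cleanup.
        try:
            power_off()
            log("Powered off the VM")
        except RuntimeError:
            log("VM was not running, continuing with destroy")
        unregister()
        log("Unregistered the VM")
        delete_files()
        log("Deleted instance files")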
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1952.319622] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1952.320584] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-702058d9-e3a9-4fc8-8cf6-2d7af556d3f5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.323929] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4b08400-20ab-4e2a-b924-c5b8fca6e1ea {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.348959] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] 9956e5d2-edda-47af-a3df-743ebed1154b/9956e5d2-edda-47af-a3df-743ebed1154b.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1952.352151] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-42e97585-2ae9-4562-bf93-47120ba1a9b1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.366966] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a289703-f29d-41a9-ab94-efa3fb979ea3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.377656] env[62510]: DEBUG oslo_vmware.api [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769624, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.498184} completed successfully. 
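[editorial note] Spawn-time disk handling, visible above for instance 9956e5d2, runs as: clone the cached base vmdk into the instance directory, extend it to the flavor's root size (the logged 1048576 reads as a size in KB, i.e. 1 GiB, under that assumption), and attach it to the VM through a reconfigure task. A compact sketch with hypothetical callables for the three vSphere operations:

    def prepare_root_disk(copy_disk, extend_disk, attach_disk,
                          cached_vmdk, instance_vmdk, root_gb, log=print):
        copy_disk(cached_vmdk, instance_vmdk)
        log("Copied Virtual Disk %s to %s" % (cached_vmdk, instance_vmdk))
        extend_disk(instance_vmdk, root_gb * 1024 * 1024)  # size in KB (assumption)
        log("Extended root virtual disk")
        attach_disk(instance_vmdk)
        log("Attached disk %s to the VM" % instance_vmdk)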
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1952.402375] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 14a54dac-d2b8-4618-86c8-ab2d08bae005/14a54dac-d2b8-4618-86c8-ab2d08bae005.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1952.402647] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1952.403129] env[62510]: DEBUG oslo_vmware.api [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 1952.403129] env[62510]: value = "task-1769628" [ 1952.403129] env[62510]: _type = "Task" [ 1952.403129] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1952.403483] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-346da517-926c-4e9e-9bdc-903291abd908 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.405752] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff56af4a-55e9-4624-98b4-c8798a6f54c1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.418833] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7de1d1f-8c0d-4b70-9baf-2fdf9ccebdad {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.422980] env[62510]: DEBUG oslo_vmware.api [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769628, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.424656] env[62510]: DEBUG oslo_vmware.api [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1952.424656] env[62510]: value = "task-1769629" [ 1952.424656] env[62510]: _type = "Task" [ 1952.424656] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1952.436032] env[62510]: DEBUG nova.compute.provider_tree [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1952.444261] env[62510]: DEBUG oslo_vmware.api [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769629, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.735629] env[62510]: DEBUG oslo_vmware.api [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769625, 'name': PowerOnVM_Task} progress is 81%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.786113] env[62510]: DEBUG oslo_vmware.api [None req-8e1be2b0-25b3-4457-8e08-7d6c71d19aa4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769627, 'name': PowerOffVM_Task, 'duration_secs': 0.244229} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1952.786259] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e1be2b0-25b3-4457-8e08-7d6c71d19aa4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1952.786388] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8e1be2b0-25b3-4457-8e08-7d6c71d19aa4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1952.786645] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-273ebb3b-b2f1-4cb7-a48e-326b24206d0e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.918730] env[62510]: DEBUG oslo_vmware.api [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769628, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.934475] env[62510]: DEBUG oslo_vmware.api [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769629, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.13142} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1952.934750] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1952.935531] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e93eb0c-f42d-4ce4-8737-25d2a67b03d7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.961259] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Reconfiguring VM instance instance-00000074 to attach disk [datastore1] 14a54dac-d2b8-4618-86c8-ab2d08bae005/14a54dac-d2b8-4618-86c8-ab2d08bae005.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1952.962367] env[62510]: ERROR nova.scheduler.client.report [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [req-1f98a187-c779-4d2a-9f1e-7c2d442c48cf] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c3653102-341b-4ed1-8b1f-1abaf8aa3e56. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-1f98a187-c779-4d2a-9f1e-7c2d442c48cf"}]} [ 1952.962713] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-abe1ba34-941d-461f-a221-92f261e97ff0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.986054] env[62510]: DEBUG oslo_vmware.api [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1952.986054] env[62510]: value = "task-1769631" [ 1952.986054] env[62510]: _type = "Task" [ 1952.986054] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1952.995747] env[62510]: DEBUG oslo_vmware.api [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769631, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.996717] env[62510]: DEBUG nova.scheduler.client.report [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Refreshing inventories for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1953.011021] env[62510]: DEBUG nova.scheduler.client.report [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Updating ProviderTree inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1953.011264] env[62510]: DEBUG nova.compute.provider_tree [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1953.022865] env[62510]: DEBUG nova.scheduler.client.report [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Refreshing aggregate associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, aggregates: None {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1953.042148] env[62510]: DEBUG nova.scheduler.client.report [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Refreshing trait associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1953.090071] env[62510]: DEBUG oslo_concurrency.lockutils [None req-97458736-9912-4289-b43b-67d229b22525 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquiring lock 
"5f229f78-6c5d-4170-bdd4-c5522b137949" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1953.090416] env[62510]: DEBUG oslo_concurrency.lockutils [None req-97458736-9912-4289-b43b-67d229b22525 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lock "5f229f78-6c5d-4170-bdd4-c5522b137949" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1953.237908] env[62510]: DEBUG oslo_vmware.api [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769625, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.252332] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-520ac1b2-1e6a-4556-a0d4-5943bdf2f881 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.260212] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b9160a1-758e-4d7e-a2c1-05a4af45c0c4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.292381] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b643dca1-dae2-4950-b09e-d3be511e7dfe {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.300342] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-629e71a6-0a83-40c3-bc9f-e2d5f0618be9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.315270] env[62510]: DEBUG nova.compute.provider_tree [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1953.419517] env[62510]: DEBUG oslo_vmware.api [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769628, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.495905] env[62510]: DEBUG oslo_vmware.api [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769631, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.593682] env[62510]: INFO nova.compute.manager [None req-97458736-9912-4289-b43b-67d229b22525 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Detaching volume 31fc22b2-cf39-495c-b65c-15cd495e88de [ 1953.623879] env[62510]: INFO nova.virt.block_device [None req-97458736-9912-4289-b43b-67d229b22525 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Attempting to driver detach volume 31fc22b2-cf39-495c-b65c-15cd495e88de from mountpoint /dev/sdb [ 1953.624134] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-97458736-9912-4289-b43b-67d229b22525 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Volume detach. Driver type: vmdk {{(pid=62510) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1953.624330] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-97458736-9912-4289-b43b-67d229b22525 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367477', 'volume_id': '31fc22b2-cf39-495c-b65c-15cd495e88de', 'name': 'volume-31fc22b2-cf39-495c-b65c-15cd495e88de', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '5f229f78-6c5d-4170-bdd4-c5522b137949', 'attached_at': '', 'detached_at': '', 'volume_id': '31fc22b2-cf39-495c-b65c-15cd495e88de', 'serial': '31fc22b2-cf39-495c-b65c-15cd495e88de'} {{(pid=62510) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1953.625244] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c674e17-2392-421f-be93-5fd7779a4f5f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.647764] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b63dff1-02a2-4254-af55-e7d9fa7e7d9d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.655467] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-556e2f8a-4c90-458a-9fba-3ab0fb2333b9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.679125] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17911b1a-2f74-4026-b898-54ec609e4695 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.697075] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-97458736-9912-4289-b43b-67d229b22525 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] The volume has not been displaced from its original location: [datastore1] volume-31fc22b2-cf39-495c-b65c-15cd495e88de/volume-31fc22b2-cf39-495c-b65c-15cd495e88de.vmdk. No consolidation needed. 
{{(pid=62510) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1953.702766] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-97458736-9912-4289-b43b-67d229b22525 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Reconfiguring VM instance instance-00000061 to detach disk 2001 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1953.703169] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5686f297-868d-4459-a881-c68f5116a740 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.723313] env[62510]: DEBUG oslo_vmware.api [None req-97458736-9912-4289-b43b-67d229b22525 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1953.723313] env[62510]: value = "task-1769633" [ 1953.723313] env[62510]: _type = "Task" [ 1953.723313] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1953.733894] env[62510]: DEBUG oslo_vmware.api [None req-97458736-9912-4289-b43b-67d229b22525 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769633, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.741363] env[62510]: DEBUG oslo_vmware.api [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769625, 'name': PowerOnVM_Task, 'duration_secs': 2.184022} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1953.742211] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1953.742516] env[62510]: INFO nova.compute.manager [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Took 15.65 seconds to spawn the instance on the hypervisor. 
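The entries above repeat the task-wait pattern that runs through this whole trace: the driver invokes a vCenter task (PowerOffVM_Task, ReconfigVM_Task, DeleteDatastoreFile_Task, ...), then blocks in oslo_vmware's wait_for_task, whose _poll_task loop logs "progress is N%" until the task reports completion. Below is a minimal stand-alone sketch of that polling loop; TaskState, poll_task_state and make_fake_poll are hypothetical stand-ins for illustration, not the real oslo_vmware internals.

import time
from dataclasses import dataclass


@dataclass
class TaskState:
    # Minimal view of a vCenter task, as reported by each poll.
    name: str
    progress: int             # 0-100, matches the "progress is N%" lines above
    state: str                # "running", "success" or "error"
    error: str | None = None


def wait_for_task(poll_task_state, task_id, poll_interval=0.5, timeout=300.0):
    # Poll a task until it succeeds, fails or times out, logging progress
    # the same way the _poll_task entries in this trace do.
    deadline = time.monotonic() + timeout
    while True:
        info = poll_task_state(task_id)   # one property-collector round trip
        print(f"Task {task_id} ({info.name}) progress is {info.progress}%")
        if info.state == "success":
            return info
        if info.state == "error":
            raise RuntimeError(f"Task {task_id} failed: {info.error}")
        if time.monotonic() > deadline:
            raise TimeoutError(f"Task {task_id} not done after {timeout}s")
        time.sleep(poll_interval)


def make_fake_poll(finish_after=3):
    # Fake poller so the sketch runs without a vCenter: reports "running"
    # a couple of times, then "success".
    calls = {"n": 0}

    def fake_poll(task_id):
        calls["n"] += 1
        done = calls["n"] >= finish_after
        return TaskState(name="PowerOffVM_Task",
                         progress=100 if done else 30 * calls["n"],
                         state="success" if done else "running")

    return fake_poll


wait_for_task(make_fake_poll(), "task-1769627", poll_interval=0.01)

The fixed poll_interval and timeout are kept only for simplicity; in the real client these intervals come from configuration rather than hard-coded values.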
[ 1953.742734] env[62510]: DEBUG nova.compute.manager [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1953.743582] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78c4a228-783f-47fa-8558-f84cf1df1fcf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.747217] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8e1be2b0-25b3-4457-8e08-7d6c71d19aa4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1953.747452] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8e1be2b0-25b3-4457-8e08-7d6c71d19aa4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1953.747648] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e1be2b0-25b3-4457-8e08-7d6c71d19aa4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Deleting the datastore file [datastore1] 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1953.748448] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c750275b-9aaa-4fee-ada1-93be91164bc9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.760630] env[62510]: DEBUG oslo_vmware.api [None req-8e1be2b0-25b3-4457-8e08-7d6c71d19aa4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for the task: (returnval){ [ 1953.760630] env[62510]: value = "task-1769634" [ 1953.760630] env[62510]: _type = "Task" [ 1953.760630] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1953.769131] env[62510]: DEBUG oslo_vmware.api [None req-8e1be2b0-25b3-4457-8e08-7d6c71d19aa4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769634, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.844999] env[62510]: DEBUG nova.scheduler.client.report [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Updated inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with generation 159 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1953.845295] env[62510]: DEBUG nova.compute.provider_tree [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Updating resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 generation from 159 to 160 during operation: update_inventory {{(pid=62510) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1953.845477] env[62510]: DEBUG nova.compute.provider_tree [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1953.920474] env[62510]: DEBUG oslo_vmware.api [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769628, 'name': ReconfigVM_Task, 'duration_secs': 1.337375} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1953.920792] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Reconfigured VM instance instance-0000006b to attach disk [datastore1] 9956e5d2-edda-47af-a3df-743ebed1154b/9956e5d2-edda-47af-a3df-743ebed1154b.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1953.921465] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0838b8bd-e070-40db-9187-5c2d7ff85ac9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.930094] env[62510]: DEBUG oslo_vmware.api [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 1953.930094] env[62510]: value = "task-1769635" [ 1953.930094] env[62510]: _type = "Task" [ 1953.930094] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1953.939316] env[62510]: DEBUG oslo_vmware.api [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769635, 'name': Rename_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.996785] env[62510]: DEBUG oslo_vmware.api [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769631, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.233619] env[62510]: DEBUG oslo_vmware.api [None req-97458736-9912-4289-b43b-67d229b22525 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769633, 'name': ReconfigVM_Task, 'duration_secs': 0.425633} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1954.233899] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-97458736-9912-4289-b43b-67d229b22525 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Reconfigured VM instance instance-00000061 to detach disk 2001 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1954.238626] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-61f031ae-2238-4033-bbdb-2ed8b77a3134 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.255271] env[62510]: DEBUG oslo_vmware.api [None req-97458736-9912-4289-b43b-67d229b22525 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1954.255271] env[62510]: value = "task-1769636" [ 1954.255271] env[62510]: _type = "Task" [ 1954.255271] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1954.272505] env[62510]: INFO nova.compute.manager [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Took 25.75 seconds to build instance. [ 1954.274277] env[62510]: DEBUG oslo_vmware.api [None req-97458736-9912-4289-b43b-67d229b22525 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769636, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.280481] env[62510]: DEBUG oslo_vmware.api [None req-8e1be2b0-25b3-4457-8e08-7d6c71d19aa4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Task: {'id': task-1769634, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.402087} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1954.280752] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e1be2b0-25b3-4457-8e08-7d6c71d19aa4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1954.280933] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8e1be2b0-25b3-4457-8e08-7d6c71d19aa4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1954.281138] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8e1be2b0-25b3-4457-8e08-7d6c71d19aa4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1954.281431] env[62510]: INFO nova.compute.manager [None req-8e1be2b0-25b3-4457-8e08-7d6c71d19aa4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Took 2.03 seconds to destroy the instance on the hypervisor. [ 1954.281659] env[62510]: DEBUG oslo.service.loopingcall [None req-8e1be2b0-25b3-4457-8e08-7d6c71d19aa4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1954.281835] env[62510]: DEBUG nova.compute.manager [-] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1954.281930] env[62510]: DEBUG nova.network.neutron [-] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1954.349788] env[62510]: DEBUG oslo_concurrency.lockutils [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.270s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1954.350563] env[62510]: DEBUG nova.compute.manager [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1954.441223] env[62510]: DEBUG oslo_vmware.api [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769635, 'name': Rename_Task, 'duration_secs': 0.204067} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1954.441488] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1954.441733] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3e1155dc-9f93-41b0-8ce3-608da683077a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.450208] env[62510]: DEBUG oslo_vmware.api [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 1954.450208] env[62510]: value = "task-1769637" [ 1954.450208] env[62510]: _type = "Task" [ 1954.450208] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1954.458084] env[62510]: DEBUG oslo_vmware.api [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769637, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.497257] env[62510]: DEBUG oslo_vmware.api [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769631, 'name': ReconfigVM_Task, 'duration_secs': 1.016405} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1954.497505] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Reconfigured VM instance instance-00000074 to attach disk [datastore1] 14a54dac-d2b8-4618-86c8-ab2d08bae005/14a54dac-d2b8-4618-86c8-ab2d08bae005.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1954.498173] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c5ebf3b5-97b1-419a-a8d8-c3852d553a64 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.505610] env[62510]: DEBUG oslo_vmware.api [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1954.505610] env[62510]: value = "task-1769638" [ 1954.505610] env[62510]: _type = "Task" [ 1954.505610] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1954.514657] env[62510]: DEBUG oslo_vmware.api [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769638, 'name': Rename_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.538605] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2834c929-7fb2-44a9-a1a7-4d76b9629d76 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Acquiring lock "92cb4e54-a00e-4974-b134-22d302932e32" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1954.538875] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2834c929-7fb2-44a9-a1a7-4d76b9629d76 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lock "92cb4e54-a00e-4974-b134-22d302932e32" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1954.567480] env[62510]: DEBUG nova.compute.manager [req-418c8409-b5e4-4c9f-a1e8-32e7e9d973e0 req-4b39cb9f-b721-435a-bc0d-a5fb8c2c9c5f service nova] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Received event network-vif-deleted-f1d12594-5d5a-4965-a017-3b055a432283 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1954.567685] env[62510]: INFO nova.compute.manager [req-418c8409-b5e4-4c9f-a1e8-32e7e9d973e0 req-4b39cb9f-b721-435a-bc0d-a5fb8c2c9c5f service nova] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Neutron deleted interface f1d12594-5d5a-4965-a017-3b055a432283; detaching it from the instance and deleting it from the info cache [ 1954.567859] env[62510]: DEBUG nova.network.neutron [req-418c8409-b5e4-4c9f-a1e8-32e7e9d973e0 req-4b39cb9f-b721-435a-bc0d-a5fb8c2c9c5f service nova] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Updating instance_info_cache with network_info: [] {{(pid=62510) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1954.765781] env[62510]: DEBUG oslo_vmware.api [None req-97458736-9912-4289-b43b-67d229b22525 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769636, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.774697] env[62510]: DEBUG oslo_concurrency.lockutils [None req-55a0c881-f886-4438-b30c-7706178d8a8c tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Lock "7b2bcec4-6df7-4591-ac02-9da04d185756" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.256s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1954.855277] env[62510]: DEBUG nova.compute.utils [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1954.856749] env[62510]: DEBUG nova.compute.manager [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1954.856931] env[62510]: DEBUG nova.network.neutron [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1954.902902] env[62510]: DEBUG nova.policy [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ec7e707afafa4638a5efd4757efbca1f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fa6ed026a1264d02abe75467127bae99', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1954.960399] env[62510]: DEBUG oslo_vmware.api [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769637, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.016149] env[62510]: DEBUG oslo_vmware.api [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769638, 'name': Rename_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.041503] env[62510]: INFO nova.compute.manager [None req-2834c929-7fb2-44a9-a1a7-4d76b9629d76 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Detaching volume da118d85-cc0e-4bcc-a73e-481ec7c76562 [ 1955.045561] env[62510]: DEBUG nova.network.neutron [-] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1955.070410] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1851a6cc-b1ea-4327-80d7-2d9a0c3f76bd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.078904] env[62510]: INFO nova.virt.block_device [None req-2834c929-7fb2-44a9-a1a7-4d76b9629d76 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Attempting to driver detach volume da118d85-cc0e-4bcc-a73e-481ec7c76562 from mountpoint /dev/sdb [ 1955.079142] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-2834c929-7fb2-44a9-a1a7-4d76b9629d76 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Volume detach. Driver type: vmdk {{(pid=62510) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1955.079332] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-2834c929-7fb2-44a9-a1a7-4d76b9629d76 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367481', 'volume_id': 'da118d85-cc0e-4bcc-a73e-481ec7c76562', 'name': 'volume-da118d85-cc0e-4bcc-a73e-481ec7c76562', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '92cb4e54-a00e-4974-b134-22d302932e32', 'attached_at': '', 'detached_at': '', 'volume_id': 'da118d85-cc0e-4bcc-a73e-481ec7c76562', 'serial': 'da118d85-cc0e-4bcc-a73e-481ec7c76562'} {{(pid=62510) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1955.082193] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0066f4f2-9486-4d44-b8f3-59cd8dfacab8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.092904] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-031fa453-7a6f-43b8-97a2-2839715e4607 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.128585] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72fea51b-3d15-4df2-ac16-c395cbb1ef48 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.131652] env[62510]: DEBUG nova.compute.manager [req-418c8409-b5e4-4c9f-a1e8-32e7e9d973e0 req-4b39cb9f-b721-435a-bc0d-a5fb8c2c9c5f service nova] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Detach interface failed, 
port_id=f1d12594-5d5a-4965-a017-3b055a432283, reason: Instance 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c could not be found. {{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1955.136386] env[62510]: INFO nova.compute.manager [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Rebuilding instance [ 1955.140161] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c203180-41f0-4fa2-9887-cae56a23d64c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.142237] env[62510]: DEBUG nova.network.neutron [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Successfully created port: f4bf4471-4bf0-485f-80a8-2548fbf3e100 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1955.165529] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ed4fd17-ad2e-42f4-a003-4041569f7bcb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.181810] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-2834c929-7fb2-44a9-a1a7-4d76b9629d76 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] The volume has not been displaced from its original location: [datastore1] volume-da118d85-cc0e-4bcc-a73e-481ec7c76562/volume-da118d85-cc0e-4bcc-a73e-481ec7c76562.vmdk. No consolidation needed. {{(pid=62510) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1955.186970] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-2834c929-7fb2-44a9-a1a7-4d76b9629d76 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Reconfiguring VM instance instance-00000063 to detach disk 2001 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1955.190564] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ec6be15f-8030-48c0-95d2-e3276bde17b6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.209424] env[62510]: DEBUG oslo_vmware.api [None req-2834c929-7fb2-44a9-a1a7-4d76b9629d76 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Waiting for the task: (returnval){ [ 1955.209424] env[62510]: value = "task-1769639" [ 1955.209424] env[62510]: _type = "Task" [ 1955.209424] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1955.218837] env[62510]: DEBUG oslo_vmware.api [None req-2834c929-7fb2-44a9-a1a7-4d76b9629d76 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769639, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.223584] env[62510]: DEBUG nova.compute.manager [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1955.224358] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c0247b5-681a-4ca3-887f-9f139c85493c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.266611] env[62510]: DEBUG oslo_vmware.api [None req-97458736-9912-4289-b43b-67d229b22525 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769636, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.360169] env[62510]: DEBUG nova.compute.manager [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1955.463531] env[62510]: DEBUG oslo_vmware.api [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769637, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.515935] env[62510]: DEBUG oslo_vmware.api [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769638, 'name': Rename_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.551190] env[62510]: INFO nova.compute.manager [-] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Took 1.27 seconds to deallocate network for instance. [ 1955.720312] env[62510]: DEBUG oslo_vmware.api [None req-2834c929-7fb2-44a9-a1a7-4d76b9629d76 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769639, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.767222] env[62510]: DEBUG oslo_vmware.api [None req-97458736-9912-4289-b43b-67d229b22525 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769636, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.961868] env[62510]: DEBUG oslo_vmware.api [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769637, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.017541] env[62510]: DEBUG oslo_vmware.api [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769638, 'name': Rename_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.058155] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8e1be2b0-25b3-4457-8e08-7d6c71d19aa4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1956.058155] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8e1be2b0-25b3-4457-8e08-7d6c71d19aa4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1956.058155] env[62510]: DEBUG nova.objects.instance [None req-8e1be2b0-25b3-4457-8e08-7d6c71d19aa4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lazy-loading 'resources' on Instance uuid 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1956.221132] env[62510]: DEBUG oslo_vmware.api [None req-2834c929-7fb2-44a9-a1a7-4d76b9629d76 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769639, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.238085] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1956.238085] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-45ffa0c4-443c-4c04-9ce0-97413671ae66 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.246416] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Waiting for the task: (returnval){ [ 1956.246416] env[62510]: value = "task-1769641" [ 1956.246416] env[62510]: _type = "Task" [ 1956.246416] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1956.255291] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769641, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.267074] env[62510]: DEBUG oslo_vmware.api [None req-97458736-9912-4289-b43b-67d229b22525 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769636, 'name': ReconfigVM_Task, 'duration_secs': 1.852144} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.267383] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-97458736-9912-4289-b43b-67d229b22525 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367477', 'volume_id': '31fc22b2-cf39-495c-b65c-15cd495e88de', 'name': 'volume-31fc22b2-cf39-495c-b65c-15cd495e88de', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '5f229f78-6c5d-4170-bdd4-c5522b137949', 'attached_at': '', 'detached_at': '', 'volume_id': '31fc22b2-cf39-495c-b65c-15cd495e88de', 'serial': '31fc22b2-cf39-495c-b65c-15cd495e88de'} {{(pid=62510) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1956.369552] env[62510]: DEBUG nova.compute.manager [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1956.398288] env[62510]: DEBUG nova.virt.hardware [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1956.398520] env[62510]: DEBUG nova.virt.hardware [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1956.398679] env[62510]: DEBUG nova.virt.hardware [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1956.398861] env[62510]: DEBUG nova.virt.hardware [None req-78267074-08a6-47b4-8579-f8396aee84a4 
tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1956.399015] env[62510]: DEBUG nova.virt.hardware [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1956.399192] env[62510]: DEBUG nova.virt.hardware [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1956.399399] env[62510]: DEBUG nova.virt.hardware [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1956.399557] env[62510]: DEBUG nova.virt.hardware [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1956.399721] env[62510]: DEBUG nova.virt.hardware [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1956.399884] env[62510]: DEBUG nova.virt.hardware [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1956.400074] env[62510]: DEBUG nova.virt.hardware [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1956.401011] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93d2a896-3436-47f4-86a5-1092b68a819a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.410034] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ccfe75d-11c0-43e4-88c1-0ff594228d04 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.463349] env[62510]: DEBUG oslo_vmware.api [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769637, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.518339] env[62510]: DEBUG oslo_vmware.api [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769638, 'name': Rename_Task, 'duration_secs': 1.64445} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.518645] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1956.518886] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b64d31d2-e7b0-4dc3-bfd5-4381ce1938b7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.525314] env[62510]: DEBUG oslo_vmware.api [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1956.525314] env[62510]: value = "task-1769642" [ 1956.525314] env[62510]: _type = "Task" [ 1956.525314] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1956.535105] env[62510]: DEBUG oslo_vmware.api [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769642, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.597036] env[62510]: DEBUG nova.compute.manager [req-8f8f5fa8-f319-4d9b-97eb-d778d8c03bee req-71d45d80-ae0e-4fcc-864a-1c822a10f364 service nova] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Received event network-vif-plugged-f4bf4471-4bf0-485f-80a8-2548fbf3e100 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1956.597259] env[62510]: DEBUG oslo_concurrency.lockutils [req-8f8f5fa8-f319-4d9b-97eb-d778d8c03bee req-71d45d80-ae0e-4fcc-864a-1c822a10f364 service nova] Acquiring lock "abf15987-86cc-4fdc-be9a-efd0448ce9ca-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1956.597470] env[62510]: DEBUG oslo_concurrency.lockutils [req-8f8f5fa8-f319-4d9b-97eb-d778d8c03bee req-71d45d80-ae0e-4fcc-864a-1c822a10f364 service nova] Lock "abf15987-86cc-4fdc-be9a-efd0448ce9ca-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1956.597639] env[62510]: DEBUG oslo_concurrency.lockutils [req-8f8f5fa8-f319-4d9b-97eb-d778d8c03bee req-71d45d80-ae0e-4fcc-864a-1c822a10f364 service nova] Lock "abf15987-86cc-4fdc-be9a-efd0448ce9ca-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1956.597802] env[62510]: DEBUG nova.compute.manager [req-8f8f5fa8-f319-4d9b-97eb-d778d8c03bee req-71d45d80-ae0e-4fcc-864a-1c822a10f364 service nova] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] No waiting events found dispatching network-vif-plugged-f4bf4471-4bf0-485f-80a8-2548fbf3e100 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1956.597964] env[62510]: WARNING nova.compute.manager [req-8f8f5fa8-f319-4d9b-97eb-d778d8c03bee req-71d45d80-ae0e-4fcc-864a-1c822a10f364 service nova] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Received unexpected event network-vif-plugged-f4bf4471-4bf0-485f-80a8-2548fbf3e100 for instance with vm_state building and task_state spawning. [ 1956.618685] env[62510]: DEBUG nova.network.neutron [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Successfully updated port: f4bf4471-4bf0-485f-80a8-2548fbf3e100 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1956.723284] env[62510]: DEBUG oslo_vmware.api [None req-2834c929-7fb2-44a9-a1a7-4d76b9629d76 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769639, 'name': ReconfigVM_Task, 'duration_secs': 1.037147} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.723567] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-2834c929-7fb2-44a9-a1a7-4d76b9629d76 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Reconfigured VM instance instance-00000063 to detach disk 2001 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1956.730606] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d34213a4-4aaa-4e81-bf6b-d63ca0076dbe {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.746637] env[62510]: DEBUG oslo_vmware.api [None req-2834c929-7fb2-44a9-a1a7-4d76b9629d76 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Waiting for the task: (returnval){ [ 1956.746637] env[62510]: value = "task-1769643" [ 1956.746637] env[62510]: _type = "Task" [ 1956.746637] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1956.764237] env[62510]: DEBUG oslo_vmware.api [None req-2834c929-7fb2-44a9-a1a7-4d76b9629d76 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769643, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.768583] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769641, 'name': PowerOffVM_Task, 'duration_secs': 0.233571} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.773026] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1956.773026] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1956.774426] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78c66826-70bd-4579-a65b-f447c80ec8bd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.783359] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1956.783663] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1834cb9a-1902-4dda-a20b-09a9c4afa763 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.812878] env[62510]: DEBUG nova.objects.instance [None req-97458736-9912-4289-b43b-67d229b22525 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lazy-loading 'flavor' on Instance uuid 5f229f78-6c5d-4170-bdd4-c5522b137949 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1956.820377] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1956.820377] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1956.820377] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Deleting the datastore file [datastore1] 7b2bcec4-6df7-4591-ac02-9da04d185756 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1956.820377] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-06145ce3-18af-4481-95b7-41b5133546d5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.827361] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 
tempest-ServerShowV257Test-213078519-project-member] Waiting for the task: (returnval){ [ 1956.827361] env[62510]: value = "task-1769645" [ 1956.827361] env[62510]: _type = "Task" [ 1956.827361] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1956.837626] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769645, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.840357] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f55de85c-a555-40f4-9aa1-2c7e42191f7b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.847883] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46d4e720-ebee-464e-88de-f41cdcadfa01 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.881705] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de8c3ff1-0a1f-4275-8827-0936b55b9014 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.889976] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb2c9d10-a598-4c73-9c9d-d839b55983c3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.905464] env[62510]: DEBUG nova.compute.provider_tree [None req-8e1be2b0-25b3-4457-8e08-7d6c71d19aa4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1956.962252] env[62510]: DEBUG oslo_vmware.api [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769637, 'name': PowerOnVM_Task, 'duration_secs': 2.262482} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.962545] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1956.962786] env[62510]: DEBUG nova.compute.manager [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1956.963569] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41c8b92e-e680-486f-84b4-364521fb611a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.036203] env[62510]: DEBUG oslo_vmware.api [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769642, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1957.121012] env[62510]: DEBUG oslo_concurrency.lockutils [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Acquiring lock "refresh_cache-abf15987-86cc-4fdc-be9a-efd0448ce9ca" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1957.121179] env[62510]: DEBUG oslo_concurrency.lockutils [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Acquired lock "refresh_cache-abf15987-86cc-4fdc-be9a-efd0448ce9ca" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1957.121340] env[62510]: DEBUG nova.network.neutron [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1957.260095] env[62510]: DEBUG oslo_vmware.api [None req-2834c929-7fb2-44a9-a1a7-4d76b9629d76 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769643, 'name': ReconfigVM_Task, 'duration_secs': 0.249732} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1957.260412] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-2834c929-7fb2-44a9-a1a7-4d76b9629d76 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367481', 'volume_id': 'da118d85-cc0e-4bcc-a73e-481ec7c76562', 'name': 'volume-da118d85-cc0e-4bcc-a73e-481ec7c76562', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '92cb4e54-a00e-4974-b134-22d302932e32', 'attached_at': '', 'detached_at': '', 'volume_id': 'da118d85-cc0e-4bcc-a73e-481ec7c76562', 'serial': 'da118d85-cc0e-4bcc-a73e-481ec7c76562'} {{(pid=62510) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1957.337905] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769645, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.263131} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1957.338355] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1957.338454] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1957.338586] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1957.408980] env[62510]: DEBUG nova.scheduler.client.report [None req-8e1be2b0-25b3-4457-8e08-7d6c71d19aa4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1957.483126] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1957.536771] 
env[62510]: DEBUG oslo_vmware.api [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769642, 'name': PowerOnVM_Task, 'duration_secs': 0.969777} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1957.537244] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1957.537525] env[62510]: INFO nova.compute.manager [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Took 11.59 seconds to spawn the instance on the hypervisor. [ 1957.537722] env[62510]: DEBUG nova.compute.manager [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1957.538691] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b539f611-ed15-4944-8c74-9f9732b5221a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.652121] env[62510]: DEBUG nova.network.neutron [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Instance cache missing network info. 
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1957.771899] env[62510]: DEBUG nova.network.neutron [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Updating instance_info_cache with network_info: [{"id": "f4bf4471-4bf0-485f-80a8-2548fbf3e100", "address": "fa:16:3e:e2:41:3d", "network": {"id": "dfd8964c-0225-4df4-815d-ef7af9be1790", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2123884413-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "fa6ed026a1264d02abe75467127bae99", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fd77ecbc-aaaf-45f4-ae8f-977d90e4052f", "external-id": "nsx-vlan-transportzone-171", "segmentation_id": 171, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4bf4471-4b", "ovs_interfaceid": "f4bf4471-4bf0-485f-80a8-2548fbf3e100", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1957.801435] env[62510]: DEBUG nova.objects.instance [None req-2834c929-7fb2-44a9-a1a7-4d76b9629d76 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lazy-loading 'flavor' on Instance uuid 92cb4e54-a00e-4974-b134-22d302932e32 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1957.823197] env[62510]: DEBUG oslo_concurrency.lockutils [None req-97458736-9912-4289-b43b-67d229b22525 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lock "5f229f78-6c5d-4170-bdd4-c5522b137949" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.733s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1957.914134] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8e1be2b0-25b3-4457-8e08-7d6c71d19aa4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.856s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1957.916504] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.434s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1957.917081] env[62510]: DEBUG nova.objects.instance [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Trying 
to apply a migration context that does not seem to be set for this instance {{(pid=62510) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1957.933287] env[62510]: INFO nova.scheduler.client.report [None req-8e1be2b0-25b3-4457-8e08-7d6c71d19aa4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Deleted allocations for instance 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c [ 1958.057411] env[62510]: INFO nova.compute.manager [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Took 17.99 seconds to build instance. [ 1958.276126] env[62510]: DEBUG oslo_concurrency.lockutils [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Releasing lock "refresh_cache-abf15987-86cc-4fdc-be9a-efd0448ce9ca" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1958.276457] env[62510]: DEBUG nova.compute.manager [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Instance network_info: |[{"id": "f4bf4471-4bf0-485f-80a8-2548fbf3e100", "address": "fa:16:3e:e2:41:3d", "network": {"id": "dfd8964c-0225-4df4-815d-ef7af9be1790", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2123884413-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "fa6ed026a1264d02abe75467127bae99", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fd77ecbc-aaaf-45f4-ae8f-977d90e4052f", "external-id": "nsx-vlan-transportzone-171", "segmentation_id": 171, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4bf4471-4b", "ovs_interfaceid": "f4bf4471-4bf0-485f-80a8-2548fbf3e100", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1958.276837] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e2:41:3d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fd77ecbc-aaaf-45f4-ae8f-977d90e4052f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f4bf4471-4bf0-485f-80a8-2548fbf3e100', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1958.284820] env[62510]: DEBUG oslo.service.loopingcall [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1958.285723] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1958.286011] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-84cfb942-aef8-4a3b-8586-24700bf19cea {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.309709] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1958.309709] env[62510]: value = "task-1769647" [ 1958.309709] env[62510]: _type = "Task" [ 1958.309709] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1958.320265] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769647, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.380423] env[62510]: DEBUG nova.virt.hardware [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1958.380934] env[62510]: DEBUG nova.virt.hardware [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1958.381114] env[62510]: DEBUG nova.virt.hardware [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1958.381366] env[62510]: DEBUG nova.virt.hardware [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1958.381672] env[62510]: DEBUG nova.virt.hardware [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1958.381883] env[62510]: DEBUG nova.virt.hardware [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 
tempest-ServerShowV257Test-213078519-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1958.382124] env[62510]: DEBUG nova.virt.hardware [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1958.382291] env[62510]: DEBUG nova.virt.hardware [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1958.382465] env[62510]: DEBUG nova.virt.hardware [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1958.382829] env[62510]: DEBUG nova.virt.hardware [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1958.383027] env[62510]: DEBUG nova.virt.hardware [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1958.384187] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0c0ae05-c641-4cbc-a715-c7dc9cf96805 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.393398] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1912363d-7b4d-4b42-ae7f-89a97064826b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.407754] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Instance VIF info [] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1958.413359] env[62510]: DEBUG oslo.service.loopingcall [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1958.413597] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1958.413806] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c648f35c-f20a-406a-8d4c-c7cfb1ed1937 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.434502] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1958.434502] env[62510]: value = "task-1769648" [ 1958.434502] env[62510]: _type = "Task" [ 1958.434502] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1958.442039] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8e1be2b0-25b3-4457-8e08-7d6c71d19aa4 tempest-AttachInterfacesTestJSON-679866724 tempest-AttachInterfacesTestJSON-679866724-project-member] Lock "0f6e9363-47ac-481e-bc1c-b8f4f9748d9c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.697s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1958.446095] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769648, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.560104] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1c135315-4854-41aa-94d2-c6738e124b03 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "14a54dac-d2b8-4618-86c8-ab2d08bae005" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.500s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1958.581902] env[62510]: DEBUG oslo_concurrency.lockutils [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquiring lock "5f229f78-6c5d-4170-bdd4-c5522b137949" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1958.582195] env[62510]: DEBUG oslo_concurrency.lockutils [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lock "5f229f78-6c5d-4170-bdd4-c5522b137949" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1958.582425] env[62510]: DEBUG oslo_concurrency.lockutils [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquiring lock "5f229f78-6c5d-4170-bdd4-c5522b137949-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1958.582613] env[62510]: DEBUG oslo_concurrency.lockutils [None 
req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lock "5f229f78-6c5d-4170-bdd4-c5522b137949-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1958.582798] env[62510]: DEBUG oslo_concurrency.lockutils [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lock "5f229f78-6c5d-4170-bdd4-c5522b137949-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1958.585726] env[62510]: INFO nova.compute.manager [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Terminating instance [ 1958.631445] env[62510]: DEBUG nova.compute.manager [req-5747a0eb-9a0f-4580-83ea-a572703504b0 req-c0fd1e82-b03b-4a92-a910-690bda31177c service nova] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Received event network-changed-f4bf4471-4bf0-485f-80a8-2548fbf3e100 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1958.631777] env[62510]: DEBUG nova.compute.manager [req-5747a0eb-9a0f-4580-83ea-a572703504b0 req-c0fd1e82-b03b-4a92-a910-690bda31177c service nova] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Refreshing instance network info cache due to event network-changed-f4bf4471-4bf0-485f-80a8-2548fbf3e100. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1958.632035] env[62510]: DEBUG oslo_concurrency.lockutils [req-5747a0eb-9a0f-4580-83ea-a572703504b0 req-c0fd1e82-b03b-4a92-a910-690bda31177c service nova] Acquiring lock "refresh_cache-abf15987-86cc-4fdc-be9a-efd0448ce9ca" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1958.632237] env[62510]: DEBUG oslo_concurrency.lockutils [req-5747a0eb-9a0f-4580-83ea-a572703504b0 req-c0fd1e82-b03b-4a92-a910-690bda31177c service nova] Acquired lock "refresh_cache-abf15987-86cc-4fdc-be9a-efd0448ce9ca" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1958.632448] env[62510]: DEBUG nova.network.neutron [req-5747a0eb-9a0f-4580-83ea-a572703504b0 req-c0fd1e82-b03b-4a92-a910-690bda31177c service nova] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Refreshing network info cache for port f4bf4471-4bf0-485f-80a8-2548fbf3e100 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1958.773381] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7a05140d-ee9a-4818-8ea1-88faa16b1c86 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Acquiring lock "92cb4e54-a00e-4974-b134-22d302932e32" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1958.816500] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2834c929-7fb2-44a9-a1a7-4d76b9629d76 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lock "92cb4e54-a00e-4974-b134-22d302932e32" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.277s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1958.817586] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7a05140d-ee9a-4818-8ea1-88faa16b1c86 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lock "92cb4e54-a00e-4974-b134-22d302932e32" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.045s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1958.818678] env[62510]: DEBUG nova.compute.manager [None req-7a05140d-ee9a-4818-8ea1-88faa16b1c86 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1958.822016] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b21e3327-c440-46e3-bb20-0c665c69b1cf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.827048] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769647, 'name': CreateVM_Task, 'duration_secs': 0.359754} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1958.827575] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1958.828437] env[62510]: DEBUG oslo_concurrency.lockutils [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1958.828856] env[62510]: DEBUG oslo_concurrency.lockutils [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1958.829058] env[62510]: DEBUG oslo_concurrency.lockutils [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1958.829209] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad451bd2-45e1-4ead-b59f-977060b69dba {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.832795] env[62510]: DEBUG nova.compute.manager [None req-7a05140d-ee9a-4818-8ea1-88faa16b1c86 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62510) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1958.835198] env[62510]: DEBUG nova.objects.instance [None req-7a05140d-ee9a-4818-8ea1-88faa16b1c86 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lazy-loading 'flavor' on Instance uuid 92cb4e54-a00e-4974-b134-22d302932e32 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1958.838889] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for the task: (returnval){ [ 1958.838889] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52585504-c4a3-f9e6-80ce-96bb859c34d1" [ 1958.838889] env[62510]: _type = "Task" [ 1958.838889] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1958.849957] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52585504-c4a3-f9e6-80ce-96bb859c34d1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.930801] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1e8cdddb-d784-493e-801a-545614f6a1b2 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1958.953052] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769648, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.090227] env[62510]: DEBUG nova.compute.manager [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1959.090515] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1959.091425] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9844bf02-8b6b-45c6-9af7-8288082b7300 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.099921] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1959.099921] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fc22e5dc-fa98-467c-aa67-5b3f8589a091 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.108285] env[62510]: DEBUG oslo_vmware.api [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1959.108285] env[62510]: value = "task-1769649" [ 1959.108285] env[62510]: _type = "Task" [ 1959.108285] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1959.117522] env[62510]: DEBUG oslo_vmware.api [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769649, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.358343] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52585504-c4a3-f9e6-80ce-96bb859c34d1, 'name': SearchDatastore_Task, 'duration_secs': 0.138664} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1959.360093] env[62510]: DEBUG nova.network.neutron [req-5747a0eb-9a0f-4580-83ea-a572703504b0 req-c0fd1e82-b03b-4a92-a910-690bda31177c service nova] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Updated VIF entry in instance network info cache for port f4bf4471-4bf0-485f-80a8-2548fbf3e100. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1959.361723] env[62510]: DEBUG nova.network.neutron [req-5747a0eb-9a0f-4580-83ea-a572703504b0 req-c0fd1e82-b03b-4a92-a910-690bda31177c service nova] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Updating instance_info_cache with network_info: [{"id": "f4bf4471-4bf0-485f-80a8-2548fbf3e100", "address": "fa:16:3e:e2:41:3d", "network": {"id": "dfd8964c-0225-4df4-815d-ef7af9be1790", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2123884413-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "fa6ed026a1264d02abe75467127bae99", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fd77ecbc-aaaf-45f4-ae8f-977d90e4052f", "external-id": "nsx-vlan-transportzone-171", "segmentation_id": 171, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4bf4471-4b", "ovs_interfaceid": "f4bf4471-4bf0-485f-80a8-2548fbf3e100", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1959.361723] env[62510]: DEBUG oslo_concurrency.lockutils [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1959.361919] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1959.362289] env[62510]: DEBUG oslo_concurrency.lockutils [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Acquiring lock "[datastore1] 
devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1959.362386] env[62510]: DEBUG oslo_concurrency.lockutils [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1959.362528] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1959.363318] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-57c3ca6d-9744-4d8b-9376-a331c062cc37 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.385034] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1959.385034] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1959.385034] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b3b435a-434a-43d5-99b3-3216b4b22f90 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.392466] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for the task: (returnval){ [ 1959.392466] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52f558b0-9946-51a5-e0ae-8f7ad95159e2" [ 1959.392466] env[62510]: _type = "Task" [ 1959.392466] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1959.402529] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52f558b0-9946-51a5-e0ae-8f7ad95159e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.447265] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769648, 'name': CreateVM_Task, 'duration_secs': 0.890828} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1959.447265] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1959.449133] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1959.449381] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1959.449751] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1959.450069] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57d4b663-68ff-474a-b479-2dc9d2b0d8d6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.456332] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Waiting for the task: (returnval){ [ 1959.456332] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52768e07-de08-3cfb-d535-bdc4bece6fa3" [ 1959.456332] env[62510]: _type = "Task" [ 1959.456332] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1959.469749] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52768e07-de08-3cfb-d535-bdc4bece6fa3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.619788] env[62510]: DEBUG oslo_vmware.api [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769649, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.762681] env[62510]: DEBUG nova.compute.manager [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Stashing vm_state: active {{(pid=62510) _prep_resize /opt/stack/nova/nova/compute/manager.py:5998}} [ 1959.849500] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a05140d-ee9a-4818-8ea1-88faa16b1c86 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1959.849894] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7c94ce76-10b6-40e3-9746-d5625ff112e8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.858921] env[62510]: DEBUG oslo_vmware.api [None req-7a05140d-ee9a-4818-8ea1-88faa16b1c86 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Waiting for the task: (returnval){ [ 1959.858921] env[62510]: value = "task-1769650" [ 1959.858921] env[62510]: _type = "Task" [ 1959.858921] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1959.863894] env[62510]: DEBUG oslo_concurrency.lockutils [req-5747a0eb-9a0f-4580-83ea-a572703504b0 req-c0fd1e82-b03b-4a92-a910-690bda31177c service nova] Releasing lock "refresh_cache-abf15987-86cc-4fdc-be9a-efd0448ce9ca" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1959.867710] env[62510]: DEBUG oslo_vmware.api [None req-7a05140d-ee9a-4818-8ea1-88faa16b1c86 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769650, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.904192] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52f558b0-9946-51a5-e0ae-8f7ad95159e2, 'name': SearchDatastore_Task, 'duration_secs': 0.139991} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1959.905026] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39463ebf-b809-495c-a3cb-1453ea0ed80c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.911430] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for the task: (returnval){ [ 1959.911430] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52857591-5984-4c20-39b0-1c7243dddce5" [ 1959.911430] env[62510]: _type = "Task" [ 1959.911430] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1959.919713] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52857591-5984-4c20-39b0-1c7243dddce5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.966794] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52768e07-de08-3cfb-d535-bdc4bece6fa3, 'name': SearchDatastore_Task, 'duration_secs': 0.091511} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1959.967130] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1959.967373] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1959.967592] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1960.120056] env[62510]: DEBUG oslo_vmware.api [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769649, 'name': PowerOffVM_Task, 'duration_secs': 0.605493} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1960.120226] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1960.121060] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1960.121060] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2499a2d8-193c-42b8-99ec-b4504873f3dc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.282963] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1960.283517] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1960.370386] env[62510]: DEBUG oslo_vmware.api [None req-7a05140d-ee9a-4818-8ea1-88faa16b1c86 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769650, 'name': PowerOffVM_Task, 'duration_secs': 0.260809} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1960.370674] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a05140d-ee9a-4818-8ea1-88faa16b1c86 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1960.370890] env[62510]: DEBUG nova.compute.manager [None req-7a05140d-ee9a-4818-8ea1-88faa16b1c86 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1960.371697] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65847009-92df-44fc-84b8-b6c31f67e1be {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.424077] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52857591-5984-4c20-39b0-1c7243dddce5, 'name': SearchDatastore_Task, 'duration_secs': 0.025576} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1960.424392] env[62510]: DEBUG oslo_concurrency.lockutils [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1960.424863] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] abf15987-86cc-4fdc-be9a-efd0448ce9ca/abf15987-86cc-4fdc-be9a-efd0448ce9ca.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1960.425142] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1960.425346] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1960.425598] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8a9efb5f-1628-40a4-b1f3-2ef9bcc6d9da {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1960.428123] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0439f54f-b291-404e-9251-9979e19cf97f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.436368] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for the task: (returnval){ [ 1960.436368] env[62510]: value = "task-1769652" [ 1960.436368] env[62510]: _type = "Task" [ 1960.436368] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1960.445191] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769652, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1960.625370] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1960.625643] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1960.626776] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6173b37-ff0f-4674-b3d5-6b249d207e73 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.634149] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Waiting for the task: (returnval){ [ 1960.634149] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52150410-3a43-5092-d4d9-a0518368837f" [ 1960.634149] env[62510]: _type = "Task" [ 1960.634149] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1960.643808] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52150410-3a43-5092-d4d9-a0518368837f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1960.789054] env[62510]: INFO nova.compute.claims [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1960.885136] env[62510]: DEBUG oslo_concurrency.lockutils [None req-7a05140d-ee9a-4818-8ea1-88faa16b1c86 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lock "92cb4e54-a00e-4974-b134-22d302932e32" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.067s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1960.949938] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769652, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1961.146390] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52150410-3a43-5092-d4d9-a0518368837f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1961.269818] env[62510]: DEBUG nova.objects.instance [None req-27e3b961-156d-4031-8527-2f0375cc49e4 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lazy-loading 'flavor' on Instance uuid 92cb4e54-a00e-4974-b134-22d302932e32 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1961.294931] env[62510]: INFO nova.compute.resource_tracker [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Updating resource usage from migration 42a6e31c-3e78-4d02-aef7-a77d618265e4 [ 1961.450250] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769652, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1961.505655] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-619b88a0-a9fe-41e7-bd76-97a48becd112 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.513525] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9295e47a-779f-4c8e-ba05-fcaa219ece30 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.543967] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2668e87-c083-4745-9e48-ef91127ab140 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.552135] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12ff77fd-aade-41fc-ba96-b5876987e442 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.567636] env[62510]: DEBUG nova.compute.provider_tree [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1961.646440] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52150410-3a43-5092-d4d9-a0518368837f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1961.774377] env[62510]: DEBUG oslo_concurrency.lockutils [None req-27e3b961-156d-4031-8527-2f0375cc49e4 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Acquiring lock "refresh_cache-92cb4e54-a00e-4974-b134-22d302932e32" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1961.774550] env[62510]: DEBUG oslo_concurrency.lockutils [None req-27e3b961-156d-4031-8527-2f0375cc49e4 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Acquired lock "refresh_cache-92cb4e54-a00e-4974-b134-22d302932e32" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1961.774742] env[62510]: DEBUG nova.network.neutron [None req-27e3b961-156d-4031-8527-2f0375cc49e4 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1961.774922] env[62510]: DEBUG nova.objects.instance [None req-27e3b961-156d-4031-8527-2f0375cc49e4 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lazy-loading 'info_cache' on Instance uuid 92cb4e54-a00e-4974-b134-22d302932e32 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1961.787772] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1961.787978] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1961.788223] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Deleting the datastore file [datastore1] 5f229f78-6c5d-4170-bdd4-c5522b137949 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1961.788505] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-719dcf85-b988-48ca-9807-c0e970046f84 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.796078] env[62510]: DEBUG oslo_vmware.api [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1961.796078] env[62510]: value = "task-1769653" [ 1961.796078] env[62510]: _type = "Task" [ 1961.796078] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1961.806288] env[62510]: DEBUG oslo_vmware.api [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769653, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1961.948927] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769652, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1962.070820] env[62510]: DEBUG nova.scheduler.client.report [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1962.147987] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52150410-3a43-5092-d4d9-a0518368837f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1962.279248] env[62510]: DEBUG nova.objects.base [None req-27e3b961-156d-4031-8527-2f0375cc49e4 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Object Instance<92cb4e54-a00e-4974-b134-22d302932e32> lazy-loaded attributes: flavor,info_cache {{(pid=62510) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1962.306778] env[62510]: DEBUG oslo_vmware.api [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769653, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1962.449901] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769652, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1962.580754] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.297s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1962.581165] env[62510]: INFO nova.compute.manager [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Migrating [ 1962.647490] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52150410-3a43-5092-d4d9-a0518368837f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1962.806948] env[62510]: DEBUG oslo_vmware.api [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769653, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1962.949417] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769652, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1963.097406] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "refresh_cache-14a54dac-d2b8-4618-86c8-ab2d08bae005" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1963.097600] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquired lock "refresh_cache-14a54dac-d2b8-4618-86c8-ab2d08bae005" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1963.097780] env[62510]: DEBUG nova.network.neutron [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1963.152247] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52150410-3a43-5092-d4d9-a0518368837f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1963.153351] env[62510]: DEBUG nova.network.neutron [None req-27e3b961-156d-4031-8527-2f0375cc49e4 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Updating instance_info_cache with network_info: [{"id": "68c246e2-7126-4f5b-bc52-3c63f14aacf5", "address": "fa:16:3e:05:6e:0f", "network": {"id": "2193bc16-0e54-4910-9194-2724652b0e5d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1870939634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.196", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "59300e0f20144d9f88b78f7c971e86c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbdab640-5fea-4254-8bd3-f855b7eaca0d", "external-id": "nsx-vlan-transportzone-615", "segmentation_id": 615, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap68c246e2-71", "ovs_interfaceid": "68c246e2-7126-4f5b-bc52-3c63f14aacf5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1963.309140] env[62510]: DEBUG oslo_vmware.api [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769653, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1963.450483] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769652, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1963.650925] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52150410-3a43-5092-d4d9-a0518368837f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1963.657285] env[62510]: DEBUG oslo_concurrency.lockutils [None req-27e3b961-156d-4031-8527-2f0375cc49e4 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Releasing lock "refresh_cache-92cb4e54-a00e-4974-b134-22d302932e32" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1963.809194] env[62510]: DEBUG oslo_vmware.api [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769653, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1963.926755] env[62510]: DEBUG nova.network.neutron [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Updating instance_info_cache with network_info: [{"id": "9015bc32-b9ad-4846-a019-0a10e61e5218", "address": "fa:16:3e:1a:36:04", "network": {"id": "de9186ec-ac4f-4ac0-8499-037f92e28197", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-164983974-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f878b652f01c48139bfc6996e5e32f5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "191a5351-07d5-4138-b855-206f48fc4375", "external-id": "nsx-vlan-transportzone-939", "segmentation_id": 939, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9015bc32-b9", "ovs_interfaceid": "9015bc32-b9ad-4846-a019-0a10e61e5218", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1963.956060] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769652, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1964.151325] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52150410-3a43-5092-d4d9-a0518368837f, 'name': SearchDatastore_Task, 'duration_secs': 3.071216} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1964.151907] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8575c14-d701-4945-a536-84cde5df6be7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.158067] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Waiting for the task: (returnval){ [ 1964.158067] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5229928b-eb72-5dd5-06a1-651013b08dc5" [ 1964.158067] env[62510]: _type = "Task" [ 1964.158067] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1964.168767] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5229928b-eb72-5dd5-06a1-651013b08dc5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1964.316440] env[62510]: DEBUG oslo_vmware.api [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769653, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1964.431483] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Releasing lock "refresh_cache-14a54dac-d2b8-4618-86c8-ab2d08bae005" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1964.453361] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769652, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1964.664918] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-27e3b961-156d-4031-8527-2f0375cc49e4 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1964.664918] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dc7c7375-eef4-4e0a-b900-b3ea5d3cf1f4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.670669] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5229928b-eb72-5dd5-06a1-651013b08dc5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1964.677016] env[62510]: DEBUG oslo_vmware.api [None req-27e3b961-156d-4031-8527-2f0375cc49e4 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Waiting for the task: (returnval){ [ 1964.677016] env[62510]: value = "task-1769654" [ 1964.677016] env[62510]: _type = "Task" [ 1964.677016] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1964.685803] env[62510]: DEBUG oslo_vmware.api [None req-27e3b961-156d-4031-8527-2f0375cc49e4 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769654, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1964.810337] env[62510]: DEBUG oslo_vmware.api [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769653, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1964.954823] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769652, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.159082] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Acquiring lock "bf62d0ce-c0e6-4a77-ab05-ac912ec5530f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1965.159082] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Lock "bf62d0ce-c0e6-4a77-ab05-ac912ec5530f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1965.175175] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5229928b-eb72-5dd5-06a1-651013b08dc5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.186400] env[62510]: DEBUG oslo_vmware.api [None req-27e3b961-156d-4031-8527-2f0375cc49e4 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769654, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.311763] env[62510]: DEBUG oslo_vmware.api [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769653, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.452432] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769652, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.670033] env[62510]: DEBUG nova.compute.manager [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1965.674772] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5229928b-eb72-5dd5-06a1-651013b08dc5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.688860] env[62510]: DEBUG oslo_vmware.api [None req-27e3b961-156d-4031-8527-2f0375cc49e4 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769654, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.814566] env[62510]: DEBUG oslo_vmware.api [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769653, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.948911] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77c22725-27a4-40aa-8ce5-8672966a0602 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.972263] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769652, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.972696] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Updating instance '14a54dac-d2b8-4618-86c8-ab2d08bae005' progress to 0 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1966.172588] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5229928b-eb72-5dd5-06a1-651013b08dc5, 'name': SearchDatastore_Task, 'duration_secs': 1.658429} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1966.172859] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1966.173135] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 7b2bcec4-6df7-4591-ac02-9da04d185756/7b2bcec4-6df7-4591-ac02-9da04d185756.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1966.173403] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-500d2ee6-624b-40ad-b232-55f798bf5194 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.181913] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Waiting for the task: (returnval){ [ 1966.181913] env[62510]: value = "task-1769655" [ 1966.181913] env[62510]: _type = "Task" [ 1966.181913] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1966.194916] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769655, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1966.197865] env[62510]: DEBUG oslo_vmware.api [None req-27e3b961-156d-4031-8527-2f0375cc49e4 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769654, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1966.200379] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1966.200627] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1966.202095] env[62510]: INFO nova.compute.claims [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1966.315789] env[62510]: DEBUG oslo_vmware.api [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769653, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1966.454712] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769652, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1966.479661] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1966.479967] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-91bf1aff-2fa0-4fe3-9e5b-f1d46bae5067 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.489226] env[62510]: DEBUG oslo_vmware.api [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1966.489226] env[62510]: value = "task-1769656" [ 1966.489226] env[62510]: _type = "Task" [ 1966.489226] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1966.499185] env[62510]: DEBUG oslo_vmware.api [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769656, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1966.693941] env[62510]: DEBUG oslo_vmware.api [None req-27e3b961-156d-4031-8527-2f0375cc49e4 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769654, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1966.699364] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769655, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1966.814386] env[62510]: DEBUG oslo_vmware.api [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769653, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1966.955063] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769652, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1966.998435] env[62510]: DEBUG oslo_vmware.api [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769656, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.193996] env[62510]: DEBUG oslo_vmware.api [None req-27e3b961-156d-4031-8527-2f0375cc49e4 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769654, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.198964] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769655, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.322064] env[62510]: DEBUG oslo_vmware.api [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769653, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.457033] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769652, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.463706] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a78cbaa4-919b-4bea-8da3-2b6851a79b65 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.472368] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b1e288c-24f2-4a61-9b08-80c138c822cd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.506335] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-202c0ef4-7ed4-4296-9041-99326152e7e7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.518636] env[62510]: DEBUG oslo_vmware.api [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769656, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.518636] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04bfc7c5-ba13-4f6a-a7e5-168df3a598d0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.531740] env[62510]: DEBUG nova.compute.provider_tree [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1967.694697] env[62510]: DEBUG oslo_vmware.api [None req-27e3b961-156d-4031-8527-2f0375cc49e4 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769654, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.699500] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769655, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.814741] env[62510]: DEBUG oslo_vmware.api [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769653, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.956647] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769652, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.011928] env[62510]: DEBUG oslo_vmware.api [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769656, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.036900] env[62510]: DEBUG nova.scheduler.client.report [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1968.196928] env[62510]: DEBUG oslo_vmware.api [None req-27e3b961-156d-4031-8527-2f0375cc49e4 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769654, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.201992] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769655, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.315759] env[62510]: DEBUG oslo_vmware.api [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769653, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.457328] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769652, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.512477] env[62510]: DEBUG oslo_vmware.api [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769656, 'name': PowerOffVM_Task} progress is 0%. 
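[editor's note] The scheduler report above republishes the provider's inventory (VCPU, MEMORY_MB, DISK_GB with reserved amounts and allocation ratios). A small sketch, not Nova or Placement code, of how those fields are conventionally turned into schedulable capacity as (total - reserved) * allocation_ratio, with max_unit as the per-instance ceiling:

def effective_capacity(total, reserved, allocation_ratio):
    """Capacity the scheduler can hand out: (total - reserved) * allocation_ratio."""
    return int((total - reserved) * allocation_ratio)


# Values copied from the inventory data logged above.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0, 'max_unit': 164},
}

for rc, inv in inventory.items():
    cap = effective_capacity(inv['total'], inv['reserved'], inv['allocation_ratio'])
    # max_unit caps how much of this resource class a single allocation may take.
    print(f"{rc}: capacity {cap}, per-instance ceiling {inv['max_unit']}")

For example, the 48 physical VCPUs with a 4.0 allocation ratio yield 192 schedulable VCPUs, which is why the claim for instance bf62d0ce succeeds immediately. [end editor's note]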
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.541474] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.341s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1968.542039] env[62510]: DEBUG nova.compute.manager [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1968.698706] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769655, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.702352] env[62510]: DEBUG oslo_vmware.api [None req-27e3b961-156d-4031-8527-2f0375cc49e4 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769654, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.815955] env[62510]: DEBUG oslo_vmware.api [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769653, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.958816] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769652, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1969.014091] env[62510]: DEBUG oslo_vmware.api [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769656, 'name': PowerOffVM_Task, 'duration_secs': 2.495859} completed successfully. 
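[editor's note] The lockutils entries above show the "compute_resources" lock being acquired by ResourceTracker.instance_claim and released 2.341s later, serializing resource accounting on this host. A minimal sketch of that serialization using oslo_concurrency.lockutils; the claim body and the resource dict are hypothetical, not the real ResourceTracker logic.

from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def instance_claim_sketch(resources, instance):
    """Hypothetical stand-in for the claim step guarded by the
    "compute_resources" lock: only one claim or usage update runs
    at a time for this compute node."""
    resources['vcpus_used'] += instance['vcpus']
    resources['memory_mb_used'] += instance['memory_mb']
    return dict(resources)


if __name__ == '__main__':
    host_usage = {'vcpus_used': 0, 'memory_mb_used': 0}
    print(instance_claim_sketch(host_usage, {'vcpus': 1, 'memory_mb': 192}))

[end editor's note]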
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1969.014396] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1969.014588] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Updating instance '14a54dac-d2b8-4618-86c8-ab2d08bae005' progress to 17 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1969.048860] env[62510]: DEBUG nova.compute.utils [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1969.051030] env[62510]: DEBUG nova.compute.manager [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1969.051030] env[62510]: DEBUG nova.network.neutron [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1969.098466] env[62510]: DEBUG nova.policy [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6eee6f5258ad443f9f14c55cb517b934', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b9ab357f5a1844e7849a848cf56b6187', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1969.203591] env[62510]: DEBUG oslo_vmware.api [None req-27e3b961-156d-4031-8527-2f0375cc49e4 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769654, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1969.207065] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769655, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1969.321033] env[62510]: DEBUG oslo_vmware.api [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769653, 'name': DeleteDatastoreFile_Task, 'duration_secs': 7.213201} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1969.321403] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1969.321731] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1969.322023] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1969.322440] env[62510]: INFO nova.compute.manager [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Took 10.23 seconds to destroy the instance on the hypervisor. [ 1969.322808] env[62510]: DEBUG oslo.service.loopingcall [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1969.323120] env[62510]: DEBUG nova.compute.manager [-] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1969.323241] env[62510]: DEBUG nova.network.neutron [-] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1969.441053] env[62510]: DEBUG nova.network.neutron [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Successfully created port: 96d4b1d9-73b7-4db5-b298-714b7378bedd {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1969.461775] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769652, 'name': CopyVirtualDisk_Task} progress is 89%. 
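[editor's note] The oslo.service.loopingcall entry above blocks while _deallocate_network_with_retries runs until it returns. A small sketch of that looping-call pattern with oslo_service.loopingcall; the retry body and the three-attempt behaviour are invented for illustration and are not Nova's deallocation code.

from oslo_service import loopingcall

attempts = {'count': 0}


def _try_deallocate():
    """Hypothetical retry body: pretend the network tear-down succeeds
    on the third attempt, then stop the loop and return a result."""
    attempts['count'] += 1
    if attempts['count'] < 3:
        return  # run again after the interval
    raise loopingcall.LoopingCallDone(retvalue='deallocated')


timer = loopingcall.FixedIntervalLoopingCall(_try_deallocate)
# Mirrors the "Waiting for function ... to return" entry above:
# start the loop and block until LoopingCallDone is raised.
result = timer.start(interval=0.1).wait()
print(result)  # 'deallocated'

[end editor's note]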
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1969.522067] env[62510]: DEBUG nova.virt.hardware [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:41Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1969.522359] env[62510]: DEBUG nova.virt.hardware [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1969.522532] env[62510]: DEBUG nova.virt.hardware [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1969.522850] env[62510]: DEBUG nova.virt.hardware [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1969.522919] env[62510]: DEBUG nova.virt.hardware [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1969.523083] env[62510]: DEBUG nova.virt.hardware [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1969.523351] env[62510]: DEBUG nova.virt.hardware [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1969.523528] env[62510]: DEBUG nova.virt.hardware [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1969.523755] env[62510]: DEBUG nova.virt.hardware [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Got 1 possible 
topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1969.523934] env[62510]: DEBUG nova.virt.hardware [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1969.524158] env[62510]: DEBUG nova.virt.hardware [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1969.530197] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-57c9f442-cad3-4316-961e-3c418e0b117a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.552956] env[62510]: DEBUG oslo_vmware.api [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1969.552956] env[62510]: value = "task-1769657" [ 1969.552956] env[62510]: _type = "Task" [ 1969.552956] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1969.553689] env[62510]: DEBUG nova.compute.manager [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1969.579376] env[62510]: DEBUG oslo_vmware.api [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769657, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1969.700215] env[62510]: DEBUG oslo_vmware.api [None req-27e3b961-156d-4031-8527-2f0375cc49e4 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769654, 'name': PowerOnVM_Task, 'duration_secs': 4.952893} completed successfully. 
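[editor's note] The nova.virt.hardware entries above enumerate CPU topologies for a 1-vCPU flavor with no flavor or image limits and arrive at a single VirtCPUTopology(cores=1,sockets=1,threads=1). A hypothetical re-derivation of that enumeration (not Nova's actual implementation): list every (sockets, cores, threads) split whose product equals the vCPU count and respects the per-dimension maxima.

from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Every (sockets, cores, threads) factorization of vcpus within the maxima."""
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                topologies.append(VirtCPUTopology(sockets, cores, threads))
    return topologies


# A 1-vCPU m1.micro/m1.nano flavor admits exactly one topology, matching the
# "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" lines.
print(possible_topologies(1))

[end editor's note]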
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1969.701122] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-27e3b961-156d-4031-8527-2f0375cc49e4 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1969.701490] env[62510]: DEBUG nova.compute.manager [None req-27e3b961-156d-4031-8527-2f0375cc49e4 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1969.702479] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e35e7809-59d3-4209-b9ee-9e4778d3852d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.709579] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769655, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.402765} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1969.710245] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 7b2bcec4-6df7-4591-ac02-9da04d185756/7b2bcec4-6df7-4591-ac02-9da04d185756.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1969.710529] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1969.710780] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-80704242-d131-46c7-82ce-bdd03c23b103 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.722960] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Waiting for the task: (returnval){ [ 1969.722960] env[62510]: value = "task-1769658" [ 1969.722960] env[62510]: _type = "Task" [ 1969.722960] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1969.732772] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769658, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1969.961926] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769652, 'name': CopyVirtualDisk_Task, 'duration_secs': 9.085592} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1969.962433] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] abf15987-86cc-4fdc-be9a-efd0448ce9ca/abf15987-86cc-4fdc-be9a-efd0448ce9ca.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1969.962815] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1969.963183] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-910df849-2459-41d6-837e-88b1a03fdca3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.972018] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for the task: (returnval){ [ 1969.972018] env[62510]: value = "task-1769660" [ 1969.972018] env[62510]: _type = "Task" [ 1969.972018] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1969.978402] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769660, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1970.040862] env[62510]: DEBUG nova.compute.manager [req-75d61a29-47d5-48a9-866a-0036e2a6552e req-005c925b-931e-435b-b052-f73d05078452 service nova] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Received event network-vif-deleted-4d5c92f8-54e7-4731-bc8e-a3598f21a0b6 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1970.041615] env[62510]: INFO nova.compute.manager [req-75d61a29-47d5-48a9-866a-0036e2a6552e req-005c925b-931e-435b-b052-f73d05078452 service nova] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Neutron deleted interface 4d5c92f8-54e7-4731-bc8e-a3598f21a0b6; detaching it from the instance and deleting it from the info cache [ 1970.041615] env[62510]: DEBUG nova.network.neutron [req-75d61a29-47d5-48a9-866a-0036e2a6552e req-005c925b-931e-435b-b052-f73d05078452 service nova] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1970.064321] env[62510]: DEBUG oslo_vmware.api [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769657, 'name': ReconfigVM_Task, 'duration_secs': 0.170834} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1970.064321] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Updating instance '14a54dac-d2b8-4618-86c8-ab2d08bae005' progress to 33 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1970.232746] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769658, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088075} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1970.233019] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1970.233806] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c38f804-8183-491d-9988-1084dbc98b65 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.265335] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] 7b2bcec4-6df7-4591-ac02-9da04d185756/7b2bcec4-6df7-4591-ac02-9da04d185756.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1970.265941] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d0782076-5344-4fd2-9bf1-4dab9e8d55aa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.288962] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Waiting for the task: (returnval){ [ 1970.288962] env[62510]: value = "task-1769661" [ 1970.288962] env[62510]: _type = "Task" [ 1970.288962] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1970.298229] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769661, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1970.481369] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769660, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087849} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1970.481635] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1970.482409] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac0e4c60-9158-4369-8c5e-adfd34095112 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.504452] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Reconfiguring VM instance instance-00000075 to attach disk [datastore1] abf15987-86cc-4fdc-be9a-efd0448ce9ca/abf15987-86cc-4fdc-be9a-efd0448ce9ca.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1970.504736] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8e94dacb-10fa-440c-9cb7-527aee778363 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.519019] env[62510]: DEBUG nova.network.neutron [-] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1970.526828] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for the task: (returnval){ [ 1970.526828] env[62510]: value = "task-1769662" [ 1970.526828] env[62510]: _type = "Task" [ 1970.526828] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1970.536439] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769662, 'name': ReconfigVM_Task} progress is 5%. 
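[editor's note] The vmops/volumeops entries above trace the spawn path for instance abf15987: copy the cached image vmdk out of devstack-image-cache_base, extend the root disk, then reconfigure the VM to attach it before rename and power-on. A compressed, hypothetical sketch of that sequence; the helper names are placeholders that only print the steps, not Nova's real functions or the vSphere API.

def copy_cached_disk(datastore, image_id, instance_id):
    src = f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
    dst = f"[{datastore}] {instance_id}/{instance_id}.vmdk"
    print(f"CopyVirtualDisk_Task: {src} -> {dst}")
    return dst


def extend_root_disk(disk_path, size_kb):
    print(f"ExtendVirtualDisk_Task: {disk_path} to {size_kb} KB")


def attach_and_boot(disk_path, instance_id):
    print(f"ReconfigVM_Task: attach {disk_path} (type sparse)")
    print(f"Rename_Task then PowerOnVM_Task for {instance_id}")


disk = copy_cached_disk('datastore1',
                        '645af513-c243-4722-b631-714f21477ae6',
                        'abf15987-86cc-4fdc-be9a-efd0448ce9ca')
extend_root_disk(disk, 1048576)  # 1048576 KB = 1 GiB, matching root_gb=1 in the flavor
attach_and_boot(disk, 'abf15987-86cc-4fdc-be9a-efd0448ce9ca')

[end editor's note]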
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1970.544582] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3295eefb-38c9-4578-9dda-c812aba88352 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.553971] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c04b5d01-a4b9-45a1-9dc5-b36c68f14423 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.569979] env[62510]: DEBUG nova.virt.hardware [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1970.570248] env[62510]: DEBUG nova.virt.hardware [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1970.570410] env[62510]: DEBUG nova.virt.hardware [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1970.570599] env[62510]: DEBUG nova.virt.hardware [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1970.570737] env[62510]: DEBUG nova.virt.hardware [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1970.570879] env[62510]: DEBUG nova.virt.hardware [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1970.571099] env[62510]: DEBUG nova.virt.hardware [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 
1970.571264] env[62510]: DEBUG nova.virt.hardware [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1970.571458] env[62510]: DEBUG nova.virt.hardware [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1970.571592] env[62510]: DEBUG nova.virt.hardware [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1970.571818] env[62510]: DEBUG nova.virt.hardware [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1970.577398] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Reconfiguring VM instance instance-00000074 to detach disk 2000 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1970.578520] env[62510]: DEBUG nova.compute.manager [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1970.580493] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-75c38f25-45f2-44b0-aff6-99d0aaee5736 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.608481] env[62510]: DEBUG nova.compute.manager [req-75d61a29-47d5-48a9-866a-0036e2a6552e req-005c925b-931e-435b-b052-f73d05078452 service nova] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Detach interface failed, port_id=4d5c92f8-54e7-4731-bc8e-a3598f21a0b6, reason: Instance 5f229f78-6c5d-4170-bdd4-c5522b137949 could not be found. {{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1970.616111] env[62510]: DEBUG oslo_vmware.api [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1970.616111] env[62510]: value = "task-1769663" [ 1970.616111] env[62510]: _type = "Task" [ 1970.616111] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1970.621190] env[62510]: DEBUG nova.virt.hardware [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1970.621428] env[62510]: DEBUG nova.virt.hardware [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1970.621586] env[62510]: DEBUG nova.virt.hardware [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1970.621769] env[62510]: DEBUG nova.virt.hardware [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1970.621913] env[62510]: DEBUG nova.virt.hardware [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1970.622073] env[62510]: DEBUG nova.virt.hardware [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1970.622284] env[62510]: DEBUG nova.virt.hardware [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1970.622445] env[62510]: DEBUG nova.virt.hardware [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1970.622607] env[62510]: 
DEBUG nova.virt.hardware [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1970.622768] env[62510]: DEBUG nova.virt.hardware [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1970.622938] env[62510]: DEBUG nova.virt.hardware [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1970.623719] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08d2094f-7aa6-4b64-9d49-4cecbbe91f44 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.632081] env[62510]: DEBUG oslo_vmware.api [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769663, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1970.635079] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de2aa0a0-0a9f-49fe-8cdc-0e70c8f17995 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.799181] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769661, 'name': ReconfigVM_Task, 'duration_secs': 0.289502} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1970.799588] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Reconfigured VM instance instance-00000073 to attach disk [datastore1] 7b2bcec4-6df7-4591-ac02-9da04d185756/7b2bcec4-6df7-4591-ac02-9da04d185756.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1970.800206] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-24a44e51-0bcf-4083-9b64-d7c4d032f14a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.807505] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Waiting for the task: (returnval){ [ 1970.807505] env[62510]: value = "task-1769664" [ 1970.807505] env[62510]: _type = "Task" [ 1970.807505] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1970.819206] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769664, 'name': Rename_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1971.022340] env[62510]: INFO nova.compute.manager [-] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Took 1.70 seconds to deallocate network for instance. [ 1971.036120] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769662, 'name': ReconfigVM_Task, 'duration_secs': 0.297774} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1971.036439] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Reconfigured VM instance instance-00000075 to attach disk [datastore1] abf15987-86cc-4fdc-be9a-efd0448ce9ca/abf15987-86cc-4fdc-be9a-efd0448ce9ca.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1971.037116] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-20c12d7c-6b0f-4d33-9b4f-1f008a2e9a12 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.044675] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for the task: (returnval){ [ 1971.044675] env[62510]: value = "task-1769665" [ 1971.044675] env[62510]: _type = "Task" [ 1971.044675] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1971.054640] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769665, 'name': Rename_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1971.126878] env[62510]: DEBUG oslo_vmware.api [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769663, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1971.163819] env[62510]: DEBUG nova.network.neutron [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Successfully updated port: 96d4b1d9-73b7-4db5-b298-714b7378bedd {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1971.318198] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769664, 'name': Rename_Task, 'duration_secs': 0.15304} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1971.318576] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1971.318840] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e406b8eb-952c-4648-af47-3f732b8316e5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.325864] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Waiting for the task: (returnval){ [ 1971.325864] env[62510]: value = "task-1769666" [ 1971.325864] env[62510]: _type = "Task" [ 1971.325864] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1971.333546] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769666, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1971.532052] env[62510]: DEBUG oslo_concurrency.lockutils [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1971.532359] env[62510]: DEBUG oslo_concurrency.lockutils [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1971.532602] env[62510]: DEBUG nova.objects.instance [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lazy-loading 'resources' on Instance uuid 5f229f78-6c5d-4170-bdd4-c5522b137949 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1971.557893] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769665, 'name': Rename_Task, 'duration_secs': 0.149256} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1971.558313] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1971.558616] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8bf009d7-6f9a-46d3-a9c5-763ff8ce6c51 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.566280] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for the task: (returnval){ [ 1971.566280] env[62510]: value = "task-1769667" [ 1971.566280] env[62510]: _type = "Task" [ 1971.566280] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1971.582312] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769667, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1971.627055] env[62510]: DEBUG oslo_vmware.api [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769663, 'name': ReconfigVM_Task, 'duration_secs': 0.995613} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1971.627055] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Reconfigured VM instance instance-00000074 to detach disk 2000 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1971.627595] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78ced038-088f-46d6-aa37-0931bce1303e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.651692] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Reconfiguring VM instance instance-00000074 to attach disk [datastore1] 14a54dac-d2b8-4618-86c8-ab2d08bae005/14a54dac-d2b8-4618-86c8-ab2d08bae005.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1971.651990] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-50a873b7-4430-41a9-870d-cb8270f3bc9c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.666879] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Acquiring lock "refresh_cache-bf62d0ce-c0e6-4a77-ab05-ac912ec5530f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1971.667100] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Acquired lock "refresh_cache-bf62d0ce-c0e6-4a77-ab05-ac912ec5530f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1971.667185] env[62510]: DEBUG nova.network.neutron [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1971.673184] env[62510]: DEBUG oslo_vmware.api [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1971.673184] env[62510]: value = "task-1769668" [ 1971.673184] env[62510]: _type = "Task" [ 1971.673184] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1971.681747] env[62510]: DEBUG oslo_vmware.api [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769668, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1971.836177] env[62510]: DEBUG oslo_vmware.api [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769666, 'name': PowerOnVM_Task, 'duration_secs': 0.434329} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1971.836544] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1971.836657] env[62510]: DEBUG nova.compute.manager [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1971.837469] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dec79b73-682a-4b3a-a072-11f5bdd41832 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.082120] env[62510]: DEBUG oslo_vmware.api [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769667, 'name': PowerOnVM_Task, 'duration_secs': 0.46971} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1972.082572] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1972.082764] env[62510]: INFO nova.compute.manager [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Took 15.71 seconds to spawn the instance on the hypervisor. 
[ 1972.083073] env[62510]: DEBUG nova.compute.manager [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1972.083941] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e47a72d8-f0f1-4343-8da7-195a49200a65 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.133231] env[62510]: DEBUG nova.compute.manager [req-61323288-f289-43b0-8982-119f6d808099 req-a8c8dddd-141f-4646-8656-1574488b8b49 service nova] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Received event network-vif-plugged-96d4b1d9-73b7-4db5-b298-714b7378bedd {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1972.133471] env[62510]: DEBUG oslo_concurrency.lockutils [req-61323288-f289-43b0-8982-119f6d808099 req-a8c8dddd-141f-4646-8656-1574488b8b49 service nova] Acquiring lock "bf62d0ce-c0e6-4a77-ab05-ac912ec5530f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1972.133675] env[62510]: DEBUG oslo_concurrency.lockutils [req-61323288-f289-43b0-8982-119f6d808099 req-a8c8dddd-141f-4646-8656-1574488b8b49 service nova] Lock "bf62d0ce-c0e6-4a77-ab05-ac912ec5530f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1972.134218] env[62510]: DEBUG oslo_concurrency.lockutils [req-61323288-f289-43b0-8982-119f6d808099 req-a8c8dddd-141f-4646-8656-1574488b8b49 service nova] Lock "bf62d0ce-c0e6-4a77-ab05-ac912ec5530f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1972.134218] env[62510]: DEBUG nova.compute.manager [req-61323288-f289-43b0-8982-119f6d808099 req-a8c8dddd-141f-4646-8656-1574488b8b49 service nova] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] No waiting events found dispatching network-vif-plugged-96d4b1d9-73b7-4db5-b298-714b7378bedd {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1972.134218] env[62510]: WARNING nova.compute.manager [req-61323288-f289-43b0-8982-119f6d808099 req-a8c8dddd-141f-4646-8656-1574488b8b49 service nova] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Received unexpected event network-vif-plugged-96d4b1d9-73b7-4db5-b298-714b7378bedd for instance with vm_state building and task_state spawning. 
[ 1972.134472] env[62510]: DEBUG nova.compute.manager [req-61323288-f289-43b0-8982-119f6d808099 req-a8c8dddd-141f-4646-8656-1574488b8b49 service nova] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Received event network-changed-96d4b1d9-73b7-4db5-b298-714b7378bedd {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1972.134472] env[62510]: DEBUG nova.compute.manager [req-61323288-f289-43b0-8982-119f6d808099 req-a8c8dddd-141f-4646-8656-1574488b8b49 service nova] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Refreshing instance network info cache due to event network-changed-96d4b1d9-73b7-4db5-b298-714b7378bedd. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1972.134696] env[62510]: DEBUG oslo_concurrency.lockutils [req-61323288-f289-43b0-8982-119f6d808099 req-a8c8dddd-141f-4646-8656-1574488b8b49 service nova] Acquiring lock "refresh_cache-bf62d0ce-c0e6-4a77-ab05-ac912ec5530f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1972.185763] env[62510]: DEBUG oslo_vmware.api [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769668, 'name': ReconfigVM_Task, 'duration_secs': 0.353052} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1972.188504] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Reconfigured VM instance instance-00000074 to attach disk [datastore1] 14a54dac-d2b8-4618-86c8-ab2d08bae005/14a54dac-d2b8-4618-86c8-ab2d08bae005.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1972.188794] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Updating instance '14a54dac-d2b8-4618-86c8-ab2d08bae005' progress to 50 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1972.220101] env[62510]: DEBUG nova.network.neutron [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Instance cache missing network info. 
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1972.327896] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb88fcfd-9497-43a9-8146-23f924f98ce0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.336808] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3d34046-a31d-4b1c-a939-068c813b94b0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.377034] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83be932e-9273-4c64-b0d3-62adb784e20a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.378379] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1972.383881] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37b46a94-25d9-4552-802a-d2c5ccbf9657 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.397904] env[62510]: DEBUG oslo_concurrency.lockutils [None req-42b04c17-672f-405a-84d8-cea3054b0669 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquiring lock "cf4160a8-1160-45fc-b9e5-e9526b6c1506" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1972.398176] env[62510]: DEBUG oslo_concurrency.lockutils [None req-42b04c17-672f-405a-84d8-cea3054b0669 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lock "cf4160a8-1160-45fc-b9e5-e9526b6c1506" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1972.398421] env[62510]: DEBUG oslo_concurrency.lockutils [None req-42b04c17-672f-405a-84d8-cea3054b0669 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquiring lock "cf4160a8-1160-45fc-b9e5-e9526b6c1506-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1972.398611] env[62510]: DEBUG oslo_concurrency.lockutils [None req-42b04c17-672f-405a-84d8-cea3054b0669 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lock "cf4160a8-1160-45fc-b9e5-e9526b6c1506-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1972.398780] env[62510]: DEBUG oslo_concurrency.lockutils [None 
req-42b04c17-672f-405a-84d8-cea3054b0669 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lock "cf4160a8-1160-45fc-b9e5-e9526b6c1506-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1972.400539] env[62510]: DEBUG nova.compute.provider_tree [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1972.401967] env[62510]: INFO nova.compute.manager [None req-42b04c17-672f-405a-84d8-cea3054b0669 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Terminating instance [ 1972.404950] env[62510]: DEBUG nova.network.neutron [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Updating instance_info_cache with network_info: [{"id": "96d4b1d9-73b7-4db5-b298-714b7378bedd", "address": "fa:16:3e:5c:47:ad", "network": {"id": "a41bfcd7-1627-4a16-8977-c23f1762d909", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1396205237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9ab357f5a1844e7849a848cf56b6187", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2a75bb6e-6331-4429-b1b9-c968cc22b9c9", "external-id": "nsx-vlan-transportzone-244", "segmentation_id": 244, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96d4b1d9-73", "ovs_interfaceid": "96d4b1d9-73b7-4db5-b298-714b7378bedd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1972.606865] env[62510]: INFO nova.compute.manager [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Took 21.55 seconds to build instance. 
[ 1972.697937] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-986682b7-72ca-4c09-82bf-bae3e9eb7a51 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.701517] env[62510]: DEBUG oslo_concurrency.lockutils [None req-eb091f5b-2515-4a31-a0b0-eaa41c4d8d84 tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Acquiring lock "7b2bcec4-6df7-4591-ac02-9da04d185756" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1972.701755] env[62510]: DEBUG oslo_concurrency.lockutils [None req-eb091f5b-2515-4a31-a0b0-eaa41c4d8d84 tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Lock "7b2bcec4-6df7-4591-ac02-9da04d185756" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1972.701957] env[62510]: DEBUG oslo_concurrency.lockutils [None req-eb091f5b-2515-4a31-a0b0-eaa41c4d8d84 tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Acquiring lock "7b2bcec4-6df7-4591-ac02-9da04d185756-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1972.702155] env[62510]: DEBUG oslo_concurrency.lockutils [None req-eb091f5b-2515-4a31-a0b0-eaa41c4d8d84 tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Lock "7b2bcec4-6df7-4591-ac02-9da04d185756-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1972.702376] env[62510]: DEBUG oslo_concurrency.lockutils [None req-eb091f5b-2515-4a31-a0b0-eaa41c4d8d84 tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Lock "7b2bcec4-6df7-4591-ac02-9da04d185756-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1972.707971] env[62510]: INFO nova.compute.manager [None req-eb091f5b-2515-4a31-a0b0-eaa41c4d8d84 tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Terminating instance [ 1972.725296] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a330e2a-8432-477a-b019-fbc968dc1271 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.745975] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Updating instance '14a54dac-d2b8-4618-86c8-ab2d08bae005' progress to 67 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1972.907505] env[62510]: DEBUG oslo_concurrency.lockutils [None 
req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Releasing lock "refresh_cache-bf62d0ce-c0e6-4a77-ab05-ac912ec5530f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1972.907889] env[62510]: DEBUG nova.compute.manager [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Instance network_info: |[{"id": "96d4b1d9-73b7-4db5-b298-714b7378bedd", "address": "fa:16:3e:5c:47:ad", "network": {"id": "a41bfcd7-1627-4a16-8977-c23f1762d909", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1396205237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9ab357f5a1844e7849a848cf56b6187", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2a75bb6e-6331-4429-b1b9-c968cc22b9c9", "external-id": "nsx-vlan-transportzone-244", "segmentation_id": 244, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96d4b1d9-73", "ovs_interfaceid": "96d4b1d9-73b7-4db5-b298-714b7378bedd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1972.908103] env[62510]: DEBUG oslo_concurrency.lockutils [req-61323288-f289-43b0-8982-119f6d808099 req-a8c8dddd-141f-4646-8656-1574488b8b49 service nova] Acquired lock "refresh_cache-bf62d0ce-c0e6-4a77-ab05-ac912ec5530f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1972.908358] env[62510]: DEBUG nova.network.neutron [req-61323288-f289-43b0-8982-119f6d808099 req-a8c8dddd-141f-4646-8656-1574488b8b49 service nova] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Refreshing network info cache for port 96d4b1d9-73b7-4db5-b298-714b7378bedd {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1972.909616] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5c:47:ad', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2a75bb6e-6331-4429-b1b9-c968cc22b9c9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '96d4b1d9-73b7-4db5-b298-714b7378bedd', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1972.917457] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Creating folder: Project (b9ab357f5a1844e7849a848cf56b6187). Parent ref: group-v367197. 
{{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1972.918927] env[62510]: DEBUG nova.compute.manager [None req-42b04c17-672f-405a-84d8-cea3054b0669 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1972.919162] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-42b04c17-672f-405a-84d8-cea3054b0669 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1972.919431] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f715c369-fa53-4c86-8922-d1cb7a223cb4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.921712] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c0fb206-ce6d-4535-be2b-2589191689de {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.927187] env[62510]: ERROR nova.scheduler.client.report [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [req-637acc9a-abb3-4219-9555-2dcab6dfb2f9] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c3653102-341b-4ed1-8b1f-1abaf8aa3e56. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-637acc9a-abb3-4219-9555-2dcab6dfb2f9"}]} [ 1972.936825] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-42b04c17-672f-405a-84d8-cea3054b0669 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1972.937085] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4899d307-de98-4d7d-926f-4301ef4c4775 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.945189] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Created folder: Project (b9ab357f5a1844e7849a848cf56b6187) in parent group-v367197. [ 1972.945419] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Creating folder: Instances. Parent ref: group-v367502. 
{{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1972.946397] env[62510]: DEBUG nova.scheduler.client.report [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Refreshing inventories for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1972.949309] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-63e11249-46cc-4e06-94a0-d3f2897d4aff {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.951338] env[62510]: DEBUG oslo_vmware.api [None req-42b04c17-672f-405a-84d8-cea3054b0669 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1972.951338] env[62510]: value = "task-1769671" [ 1972.951338] env[62510]: _type = "Task" [ 1972.951338] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1972.960089] env[62510]: DEBUG oslo_vmware.api [None req-42b04c17-672f-405a-84d8-cea3054b0669 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769671, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1972.962701] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Created folder: Instances in parent group-v367502. [ 1972.962986] env[62510]: DEBUG oslo.service.loopingcall [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1972.963124] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1972.963373] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2085c4f3-e654-42b1-ac70-f782384ac8d1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.978869] env[62510]: DEBUG nova.scheduler.client.report [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Updating ProviderTree inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1972.979088] env[62510]: DEBUG nova.compute.provider_tree [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1972.987274] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1972.987274] env[62510]: value = "task-1769673" [ 1972.987274] env[62510]: _type = "Task" [ 1972.987274] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1972.993509] env[62510]: DEBUG nova.scheduler.client.report [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Refreshing aggregate associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, aggregates: None {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1972.999636] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769673, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1973.016747] env[62510]: DEBUG nova.scheduler.client.report [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Refreshing trait associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1973.067535] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cb9cc2d5-96c2-4341-8f69-c98e4f9a192b tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquiring lock "11490e72-b9a5-4e8e-86c4-300c594cd914" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1973.067833] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cb9cc2d5-96c2-4341-8f69-c98e4f9a192b tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "11490e72-b9a5-4e8e-86c4-300c594cd914" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1973.068075] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cb9cc2d5-96c2-4341-8f69-c98e4f9a192b tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquiring lock "11490e72-b9a5-4e8e-86c4-300c594cd914-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1973.068290] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cb9cc2d5-96c2-4341-8f69-c98e4f9a192b tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "11490e72-b9a5-4e8e-86c4-300c594cd914-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1973.068514] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cb9cc2d5-96c2-4341-8f69-c98e4f9a192b tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "11490e72-b9a5-4e8e-86c4-300c594cd914-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1973.073961] env[62510]: INFO nova.compute.manager [None req-cb9cc2d5-96c2-4341-8f69-c98e4f9a192b tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Terminating instance [ 1973.108940] env[62510]: DEBUG oslo_concurrency.lockutils [None req-78267074-08a6-47b4-8579-f8396aee84a4 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Lock "abf15987-86cc-4fdc-be9a-efd0448ce9ca" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.055s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1973.230585] env[62510]: DEBUG oslo_concurrency.lockutils [None req-eb091f5b-2515-4a31-a0b0-eaa41c4d8d84 tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Acquiring lock "refresh_cache-7b2bcec4-6df7-4591-ac02-9da04d185756" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1973.230764] env[62510]: DEBUG oslo_concurrency.lockutils [None req-eb091f5b-2515-4a31-a0b0-eaa41c4d8d84 tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Acquired lock "refresh_cache-7b2bcec4-6df7-4591-ac02-9da04d185756" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1973.230969] env[62510]: DEBUG nova.network.neutron [None req-eb091f5b-2515-4a31-a0b0-eaa41c4d8d84 tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1973.290023] env[62510]: DEBUG nova.network.neutron [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Port 9015bc32-b9ad-4846-a019-0a10e61e5218 binding to destination host cpu-1 is already ACTIVE {{(pid=62510) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1973.299847] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85b0a27a-fa7f-4d77-a5a5-0a9361f63f44 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.310043] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7006abba-798b-47c7-8cac-6fc2f2a30c1c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.344065] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f463e7b1-f184-44e7-b414-edd879960bdc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.356421] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75e9e198-6ea6-47e5-a0ee-71f2b2d7e126 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.374592] env[62510]: DEBUG nova.compute.provider_tree [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1973.462720] env[62510]: DEBUG oslo_vmware.api [None req-42b04c17-672f-405a-84d8-cea3054b0669 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769671, 'name': PowerOffVM_Task, 'duration_secs': 0.282496} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1973.463085] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-42b04c17-672f-405a-84d8-cea3054b0669 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1973.463339] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-42b04c17-672f-405a-84d8-cea3054b0669 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1973.463617] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-afcf8b5f-6340-4478-9497-f77a09bb8b8c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.498240] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769673, 'name': CreateVM_Task, 'duration_secs': 0.493843} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1973.498442] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1973.499181] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1973.499397] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1973.499794] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1973.500082] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-661ec864-1192-430e-9b55-5837796a4db2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.506032] env[62510]: DEBUG oslo_vmware.api [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb 
tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Waiting for the task: (returnval){ [ 1973.506032] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5231ce39-8acf-b5ea-51d4-01e85c1416d0" [ 1973.506032] env[62510]: _type = "Task" [ 1973.506032] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1973.518084] env[62510]: DEBUG oslo_vmware.api [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5231ce39-8acf-b5ea-51d4-01e85c1416d0, 'name': SearchDatastore_Task, 'duration_secs': 0.010034} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1973.518385] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1973.518627] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1973.518853] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1973.518999] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1973.519192] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1973.519522] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7046b469-2b4d-4b7b-8344-2fd4268b92c8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.536749] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1973.537064] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1973.538141] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab4c0ad3-7376-4527-866c-c1361fd60e9d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.543751] env[62510]: DEBUG oslo_vmware.api [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Waiting for the task: (returnval){ [ 1973.543751] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]522c4752-db3d-982a-cd17-ad9a9b621ecb" [ 1973.543751] env[62510]: _type = "Task" [ 1973.543751] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1973.549356] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-42b04c17-672f-405a-84d8-cea3054b0669 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1973.549589] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-42b04c17-672f-405a-84d8-cea3054b0669 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1973.549765] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-42b04c17-672f-405a-84d8-cea3054b0669 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Deleting the datastore file [datastore1] cf4160a8-1160-45fc-b9e5-e9526b6c1506 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1973.550406] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-842a0e43-0975-4652-bcde-3794ff8f1ba8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.554893] env[62510]: DEBUG oslo_vmware.api [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]522c4752-db3d-982a-cd17-ad9a9b621ecb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1973.561853] env[62510]: DEBUG oslo_vmware.api [None req-42b04c17-672f-405a-84d8-cea3054b0669 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for the task: (returnval){ [ 1973.561853] env[62510]: value = "task-1769675" [ 1973.561853] env[62510]: _type = "Task" [ 1973.561853] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1973.569962] env[62510]: DEBUG oslo_vmware.api [None req-42b04c17-672f-405a-84d8-cea3054b0669 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769675, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1973.577733] env[62510]: DEBUG nova.compute.manager [None req-cb9cc2d5-96c2-4341-8f69-c98e4f9a192b tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1973.577950] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-cb9cc2d5-96c2-4341-8f69-c98e4f9a192b tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1973.578925] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b9b8747-d365-4b50-9899-831efd7cb86e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.586496] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb9cc2d5-96c2-4341-8f69-c98e4f9a192b tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1973.586738] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2679ce4c-4fe0-461a-8d4e-cb9d9e6317cc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.594441] env[62510]: DEBUG oslo_vmware.api [None req-cb9cc2d5-96c2-4341-8f69-c98e4f9a192b tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 1973.594441] env[62510]: value = "task-1769676" [ 1973.594441] env[62510]: _type = "Task" [ 1973.594441] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1973.604483] env[62510]: DEBUG oslo_vmware.api [None req-cb9cc2d5-96c2-4341-8f69-c98e4f9a192b tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769676, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1973.647833] env[62510]: DEBUG nova.network.neutron [req-61323288-f289-43b0-8982-119f6d808099 req-a8c8dddd-141f-4646-8656-1574488b8b49 service nova] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Updated VIF entry in instance network info cache for port 96d4b1d9-73b7-4db5-b298-714b7378bedd. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1973.648419] env[62510]: DEBUG nova.network.neutron [req-61323288-f289-43b0-8982-119f6d808099 req-a8c8dddd-141f-4646-8656-1574488b8b49 service nova] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Updating instance_info_cache with network_info: [{"id": "96d4b1d9-73b7-4db5-b298-714b7378bedd", "address": "fa:16:3e:5c:47:ad", "network": {"id": "a41bfcd7-1627-4a16-8977-c23f1762d909", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1396205237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b9ab357f5a1844e7849a848cf56b6187", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2a75bb6e-6331-4429-b1b9-c968cc22b9c9", "external-id": "nsx-vlan-transportzone-244", "segmentation_id": 244, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96d4b1d9-73", "ovs_interfaceid": "96d4b1d9-73b7-4db5-b298-714b7378bedd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1973.754319] env[62510]: DEBUG nova.network.neutron [None req-eb091f5b-2515-4a31-a0b0-eaa41c4d8d84 tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Instance cache missing network info. 
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1973.806028] env[62510]: DEBUG nova.network.neutron [None req-eb091f5b-2515-4a31-a0b0-eaa41c4d8d84 tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1973.852734] env[62510]: INFO nova.compute.manager [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Rescuing [ 1973.852989] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Acquiring lock "refresh_cache-abf15987-86cc-4fdc-be9a-efd0448ce9ca" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1973.853157] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Acquired lock "refresh_cache-abf15987-86cc-4fdc-be9a-efd0448ce9ca" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1973.853349] env[62510]: DEBUG nova.network.neutron [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1973.907175] env[62510]: DEBUG nova.scheduler.client.report [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Updated inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with generation 163 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1973.907449] env[62510]: DEBUG nova.compute.provider_tree [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Updating resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 generation from 163 to 164 during operation: update_inventory {{(pid=62510) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1973.907629] env[62510]: DEBUG nova.compute.provider_tree [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 
'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1974.057866] env[62510]: DEBUG oslo_vmware.api [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]522c4752-db3d-982a-cd17-ad9a9b621ecb, 'name': SearchDatastore_Task, 'duration_secs': 0.012397} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1974.058763] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca7e6614-cd40-4e4a-b8eb-7e93767abb70 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.066812] env[62510]: DEBUG oslo_vmware.api [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Waiting for the task: (returnval){ [ 1974.066812] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5234a329-82b6-c50c-3f86-fc7d6ca87efc" [ 1974.066812] env[62510]: _type = "Task" [ 1974.066812] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1974.073550] env[62510]: DEBUG oslo_vmware.api [None req-42b04c17-672f-405a-84d8-cea3054b0669 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Task: {'id': task-1769675, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174978} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1974.074982] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-42b04c17-672f-405a-84d8-cea3054b0669 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1974.074982] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-42b04c17-672f-405a-84d8-cea3054b0669 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1974.074982] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-42b04c17-672f-405a-84d8-cea3054b0669 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1974.074982] env[62510]: INFO nova.compute.manager [None req-42b04c17-672f-405a-84d8-cea3054b0669 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Took 1.16 seconds to destroy the instance on the hypervisor. 
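
The set_inventory_for_provider record at [ 1973.907175] carries a per-resource-class inventory payload. For reference, the same payload restated as a Python literal; the values are copied from the log record above, and the Placement API call that Nova's report client makes with it is not shown here.

    # Inventory reported for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56
    # (generation 163 -> 164), values copied from the log record above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'min_unit': 1,
                      'max_unit': 16,    'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'min_unit': 1,
                      'max_unit': 165,   'step_size': 1, 'allocation_ratio': 1.0},
    }
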
[ 1974.075210] env[62510]: DEBUG oslo.service.loopingcall [None req-42b04c17-672f-405a-84d8-cea3054b0669 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1974.075249] env[62510]: DEBUG nova.compute.manager [-] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1974.075377] env[62510]: DEBUG nova.network.neutron [-] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1974.080244] env[62510]: DEBUG oslo_vmware.api [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5234a329-82b6-c50c-3f86-fc7d6ca87efc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1974.104560] env[62510]: DEBUG oslo_vmware.api [None req-cb9cc2d5-96c2-4341-8f69-c98e4f9a192b tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769676, 'name': PowerOffVM_Task, 'duration_secs': 0.211239} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1974.104819] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb9cc2d5-96c2-4341-8f69-c98e4f9a192b tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1974.104983] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-cb9cc2d5-96c2-4341-8f69-c98e4f9a192b tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1974.105250] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a8d5c592-f580-44bd-98b2-37239097093b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.151499] env[62510]: DEBUG oslo_concurrency.lockutils [req-61323288-f289-43b0-8982-119f6d808099 req-a8c8dddd-141f-4646-8656-1574488b8b49 service nova] Releasing lock "refresh_cache-bf62d0ce-c0e6-4a77-ab05-ac912ec5530f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1974.185550] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-cb9cc2d5-96c2-4341-8f69-c98e4f9a192b tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1974.185820] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-cb9cc2d5-96c2-4341-8f69-c98e4f9a192b tempest-AttachVolumeNegativeTest-146397362 
tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1974.186048] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb9cc2d5-96c2-4341-8f69-c98e4f9a192b tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Deleting the datastore file [datastore1] 11490e72-b9a5-4e8e-86c4-300c594cd914 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1974.186371] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-356e6454-f489-4e8f-94a8-7cabaa813c94 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.200512] env[62510]: DEBUG oslo_vmware.api [None req-cb9cc2d5-96c2-4341-8f69-c98e4f9a192b tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 1974.200512] env[62510]: value = "task-1769679" [ 1974.200512] env[62510]: _type = "Task" [ 1974.200512] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1974.211194] env[62510]: DEBUG oslo_vmware.api [None req-cb9cc2d5-96c2-4341-8f69-c98e4f9a192b tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769679, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1974.308039] env[62510]: DEBUG oslo_concurrency.lockutils [None req-eb091f5b-2515-4a31-a0b0-eaa41c4d8d84 tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Releasing lock "refresh_cache-7b2bcec4-6df7-4591-ac02-9da04d185756" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1974.308497] env[62510]: DEBUG nova.compute.manager [None req-eb091f5b-2515-4a31-a0b0-eaa41c4d8d84 tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1974.309987] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-eb091f5b-2515-4a31-a0b0-eaa41c4d8d84 tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1974.309987] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "14a54dac-d2b8-4618-86c8-ab2d08bae005-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1974.309987] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "14a54dac-d2b8-4618-86c8-ab2d08bae005-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1974.309987] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "14a54dac-d2b8-4618-86c8-ab2d08bae005-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1974.315171] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68fba54c-7cc0-4de6-aa55-5299cbd86bc9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.326745] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb091f5b-2515-4a31-a0b0-eaa41c4d8d84 tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1974.326745] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-04b877ef-ad8d-4e15-8632-7782dc0fad39 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.334785] env[62510]: DEBUG oslo_vmware.api [None req-eb091f5b-2515-4a31-a0b0-eaa41c4d8d84 tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Waiting for the task: (returnval){ [ 1974.334785] env[62510]: value = "task-1769680" [ 1974.334785] env[62510]: _type = "Task" [ 1974.334785] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1974.346051] env[62510]: DEBUG oslo_vmware.api [None req-eb091f5b-2515-4a31-a0b0-eaa41c4d8d84 tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769680, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1974.412494] env[62510]: DEBUG oslo_concurrency.lockutils [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.880s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1974.414715] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 2.036s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1974.414938] env[62510]: DEBUG nova.objects.instance [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62510) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1974.446932] env[62510]: INFO nova.scheduler.client.report [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Deleted allocations for instance 5f229f78-6c5d-4170-bdd4-c5522b137949 [ 1974.579846] env[62510]: DEBUG oslo_vmware.api [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5234a329-82b6-c50c-3f86-fc7d6ca87efc, 'name': SearchDatastore_Task, 'duration_secs': 0.016699} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1974.580252] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1974.580619] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] bf62d0ce-c0e6-4a77-ab05-ac912ec5530f/bf62d0ce-c0e6-4a77-ab05-ac912ec5530f.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1974.580973] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-06a9f5db-d33f-458c-940a-3a97ebc8b8f4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.590328] env[62510]: DEBUG oslo_vmware.api [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Waiting for the task: (returnval){ [ 1974.590328] env[62510]: value = "task-1769681" [ 1974.590328] env[62510]: _type = "Task" [ 1974.590328] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1974.599938] env[62510]: DEBUG oslo_vmware.api [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Task: {'id': task-1769681, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1974.711956] env[62510]: DEBUG oslo_vmware.api [None req-cb9cc2d5-96c2-4341-8f69-c98e4f9a192b tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769679, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.176791} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1974.712264] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb9cc2d5-96c2-4341-8f69-c98e4f9a192b tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1974.712475] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-cb9cc2d5-96c2-4341-8f69-c98e4f9a192b tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1974.712676] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-cb9cc2d5-96c2-4341-8f69-c98e4f9a192b tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1974.713328] env[62510]: INFO nova.compute.manager [None req-cb9cc2d5-96c2-4341-8f69-c98e4f9a192b tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1974.713328] env[62510]: DEBUG oslo.service.loopingcall [None req-cb9cc2d5-96c2-4341-8f69-c98e4f9a192b tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1974.713534] env[62510]: DEBUG nova.compute.manager [-] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1974.713534] env[62510]: DEBUG nova.network.neutron [-] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1974.850400] env[62510]: DEBUG oslo_vmware.api [None req-eb091f5b-2515-4a31-a0b0-eaa41c4d8d84 tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769680, 'name': PowerOffVM_Task, 'duration_secs': 0.213739} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1974.850779] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb091f5b-2515-4a31-a0b0-eaa41c4d8d84 tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1974.850909] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-eb091f5b-2515-4a31-a0b0-eaa41c4d8d84 tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1974.851574] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6c4df99a-fb04-4832-8a6d-e8c60dc26265 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.884977] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-eb091f5b-2515-4a31-a0b0-eaa41c4d8d84 tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1974.885251] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-eb091f5b-2515-4a31-a0b0-eaa41c4d8d84 tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1974.885478] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb091f5b-2515-4a31-a0b0-eaa41c4d8d84 tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Deleting the datastore file [datastore1] 7b2bcec4-6df7-4591-ac02-9da04d185756 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1974.885726] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0e7aab7c-fb34-40af-9e69-1e701f177258 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.896918] env[62510]: DEBUG oslo_vmware.api [None req-eb091f5b-2515-4a31-a0b0-eaa41c4d8d84 tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Waiting for the task: (returnval){ [ 1974.896918] env[62510]: value = "task-1769683" [ 1974.896918] env[62510]: _type = "Task" [ 1974.896918] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1974.898190] env[62510]: DEBUG nova.network.neutron [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Updating instance_info_cache with network_info: [{"id": "f4bf4471-4bf0-485f-80a8-2548fbf3e100", "address": "fa:16:3e:e2:41:3d", "network": {"id": "dfd8964c-0225-4df4-815d-ef7af9be1790", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2123884413-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "fa6ed026a1264d02abe75467127bae99", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fd77ecbc-aaaf-45f4-ae8f-977d90e4052f", "external-id": "nsx-vlan-transportzone-171", "segmentation_id": 171, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4bf4471-4b", "ovs_interfaceid": "f4bf4471-4bf0-485f-80a8-2548fbf3e100", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1974.911091] env[62510]: DEBUG oslo_vmware.api [None req-eb091f5b-2515-4a31-a0b0-eaa41c4d8d84 tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769683, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1974.962793] env[62510]: DEBUG oslo_concurrency.lockutils [None req-b3ab7701-4db9-43de-9084-83ea1b15a9e5 tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lock "5f229f78-6c5d-4170-bdd4-c5522b137949" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.376s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1974.985392] env[62510]: DEBUG nova.compute.manager [req-58848303-d671-475c-9e6a-99794c3314cb req-f9527108-e32d-4867-bef2-357040b96ffd service nova] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Received event network-vif-deleted-022a0379-8a0f-412f-a55a-f8fcaf1102f3 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1974.985603] env[62510]: INFO nova.compute.manager [req-58848303-d671-475c-9e6a-99794c3314cb req-f9527108-e32d-4867-bef2-357040b96ffd service nova] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Neutron deleted interface 022a0379-8a0f-412f-a55a-f8fcaf1102f3; detaching it from the instance and deleting it from the info cache [ 1974.985749] env[62510]: DEBUG nova.network.neutron [req-58848303-d671-475c-9e6a-99794c3314cb req-f9527108-e32d-4867-bef2-357040b96ffd service nova] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1975.103842] env[62510]: DEBUG oslo_vmware.api [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Task: {'id': task-1769681, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1975.264190] env[62510]: DEBUG nova.compute.manager [req-a56460cd-f009-4ca5-958b-343c2dbaa218 req-7a0a4dbc-92b4-4bfa-b146-8d9fcb0b3f61 service nova] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Received event network-vif-deleted-b582dbce-50e8-4781-89ae-5c8667be6584 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1975.264190] env[62510]: INFO nova.compute.manager [req-a56460cd-f009-4ca5-958b-343c2dbaa218 req-7a0a4dbc-92b4-4bfa-b146-8d9fcb0b3f61 service nova] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Neutron deleted interface b582dbce-50e8-4781-89ae-5c8667be6584; detaching it from the instance and deleting it from the info cache [ 1975.264190] env[62510]: DEBUG nova.network.neutron [req-a56460cd-f009-4ca5-958b-343c2dbaa218 req-7a0a4dbc-92b4-4bfa-b146-8d9fcb0b3f61 service nova] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1975.367049] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "refresh_cache-14a54dac-d2b8-4618-86c8-ab2d08bae005" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1975.367049] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquired lock "refresh_cache-14a54dac-d2b8-4618-86c8-ab2d08bae005" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1975.367049] env[62510]: DEBUG nova.network.neutron [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1975.406465] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Releasing lock "refresh_cache-abf15987-86cc-4fdc-be9a-efd0448ce9ca" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1975.417332] env[62510]: DEBUG oslo_vmware.api [None req-eb091f5b-2515-4a31-a0b0-eaa41c4d8d84 tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769683, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1975.435932] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3ffa73d9-a1f4-4f38-a8c8-fcd82686e5ac tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.018s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1975.451627] env[62510]: DEBUG nova.network.neutron [-] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1975.489986] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b41dd790-c4b5-47ef-a57d-47da9cf6be4a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.507182] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-505569f6-4d1f-47fc-a4a5-3e0c8c17987b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.567042] env[62510]: DEBUG nova.compute.manager [req-58848303-d671-475c-9e6a-99794c3314cb req-f9527108-e32d-4867-bef2-357040b96ffd service nova] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Detach interface failed, port_id=022a0379-8a0f-412f-a55a-f8fcaf1102f3, reason: Instance cf4160a8-1160-45fc-b9e5-e9526b6c1506 could not be found. {{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1975.608065] env[62510]: DEBUG oslo_vmware.api [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Task: {'id': task-1769681, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.581939} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1975.608440] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] bf62d0ce-c0e6-4a77-ab05-ac912ec5530f/bf62d0ce-c0e6-4a77-ab05-ac912ec5530f.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1975.608679] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1975.608962] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5bf47319-5114-4015-ba73-90f7138789ba {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.621727] env[62510]: DEBUG oslo_vmware.api [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Waiting for the task: (returnval){ [ 1975.621727] env[62510]: value = "task-1769685" [ 1975.621727] env[62510]: _type = "Task" [ 1975.621727] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1975.634589] env[62510]: DEBUG oslo_vmware.api [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Task: {'id': task-1769685, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1975.742871] env[62510]: DEBUG nova.network.neutron [-] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1975.767525] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b54ff0bb-a1d2-4190-9d61-f88334a3b9a7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.783555] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29ab28ba-b75b-47a2-a4a7-275b13eb2c6d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.818481] env[62510]: DEBUG nova.compute.manager [req-a56460cd-f009-4ca5-958b-343c2dbaa218 req-7a0a4dbc-92b4-4bfa-b146-8d9fcb0b3f61 service nova] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Detach interface failed, port_id=b582dbce-50e8-4781-89ae-5c8667be6584, reason: Instance 11490e72-b9a5-4e8e-86c4-300c594cd914 could not be found. 
{{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1975.912629] env[62510]: DEBUG oslo_vmware.api [None req-eb091f5b-2515-4a31-a0b0-eaa41c4d8d84 tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Task: {'id': task-1769683, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.599541} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1975.913040] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb091f5b-2515-4a31-a0b0-eaa41c4d8d84 tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1975.913116] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-eb091f5b-2515-4a31-a0b0-eaa41c4d8d84 tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1975.913252] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-eb091f5b-2515-4a31-a0b0-eaa41c4d8d84 tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1975.913428] env[62510]: INFO nova.compute.manager [None req-eb091f5b-2515-4a31-a0b0-eaa41c4d8d84 tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Took 1.60 seconds to destroy the instance on the hypervisor. [ 1975.913669] env[62510]: DEBUG oslo.service.loopingcall [None req-eb091f5b-2515-4a31-a0b0-eaa41c4d8d84 tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1975.913860] env[62510]: DEBUG nova.compute.manager [-] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1975.913956] env[62510]: DEBUG nova.network.neutron [-] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1975.930952] env[62510]: DEBUG nova.network.neutron [-] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1975.956729] env[62510]: INFO nova.compute.manager [-] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Took 1.88 seconds to deallocate network for instance. 
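
The lock records threaded through this trace ("compute_resources", "refresh_cache-<uuid>", the per-instance "-events" locks) are emitted by oslo.concurrency's lockutils helpers. A minimal sketch of the two usage patterns behind those messages follows; it is not Nova's actual code, and the function bodies are placeholders.

    from oslo_concurrency import lockutils

    # Decorator form: the wrapper logs 'Acquiring lock ... by "<function>"' and
    # 'Lock ... acquired/released by ...' with waited/held times, as seen above.
    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass  # placeholder body

    # Context-manager form: logs 'Acquiring/Acquired/Releasing lock ...' without
    # the 'by "<function>"' suffix, e.g. the refresh_cache-* locks above.
    def refresh_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # placeholder body
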
[ 1976.103080] env[62510]: DEBUG nova.network.neutron [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Updating instance_info_cache with network_info: [{"id": "9015bc32-b9ad-4846-a019-0a10e61e5218", "address": "fa:16:3e:1a:36:04", "network": {"id": "de9186ec-ac4f-4ac0-8499-037f92e28197", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-164983974-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f878b652f01c48139bfc6996e5e32f5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "191a5351-07d5-4138-b855-206f48fc4375", "external-id": "nsx-vlan-transportzone-939", "segmentation_id": 939, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9015bc32-b9", "ovs_interfaceid": "9015bc32-b9ad-4846-a019-0a10e61e5218", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1976.129374] env[62510]: DEBUG oslo_vmware.api [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Task: {'id': task-1769685, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086792} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1976.129458] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1976.130251] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3498190-4ce3-440e-8afb-a07b7adde14c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.155151] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] bf62d0ce-c0e6-4a77-ab05-ac912ec5530f/bf62d0ce-c0e6-4a77-ab05-ac912ec5530f.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1976.155895] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0307d9eb-d1f0-40d5-8fc7-b9069b04c4cd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.176284] env[62510]: DEBUG oslo_vmware.api [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Waiting for the task: (returnval){ [ 1976.176284] env[62510]: value = "task-1769686" [ 1976.176284] env[62510]: _type = "Task" [ 1976.176284] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1976.185030] env[62510]: DEBUG oslo_vmware.api [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Task: {'id': task-1769686, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.245628] env[62510]: INFO nova.compute.manager [-] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Took 1.53 seconds to deallocate network for instance. 
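
The oslo.service.loopingcall records above ("Waiting for function ..._deallocate_network_with_retries to return") show Nova driving network deallocation through a looping-call helper. A generic sketch of that calling pattern, assuming the fixed-interval variant for simplicity (the retry helper Nova actually uses here may differ); work_is_done() is a hypothetical predicate, not a real API.

    from oslo_service import loopingcall

    def _poll():
        # One iteration of work; raising LoopingCallDone stops the loop and
        # passes a value back to .wait().
        if work_is_done():  # hypothetical predicate
            raise loopingcall.LoopingCallDone(retvalue=True)

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    result = timer.start(interval=1.0).wait()
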
[ 1976.434898] env[62510]: DEBUG nova.network.neutron [-] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1976.467708] env[62510]: DEBUG oslo_concurrency.lockutils [None req-42b04c17-672f-405a-84d8-cea3054b0669 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1976.467708] env[62510]: DEBUG oslo_concurrency.lockutils [None req-42b04c17-672f-405a-84d8-cea3054b0669 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1976.467708] env[62510]: DEBUG nova.objects.instance [None req-42b04c17-672f-405a-84d8-cea3054b0669 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lazy-loading 'resources' on Instance uuid cf4160a8-1160-45fc-b9e5-e9526b6c1506 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1976.608139] env[62510]: DEBUG oslo_concurrency.lockutils [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Releasing lock "refresh_cache-14a54dac-d2b8-4618-86c8-ab2d08bae005" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1976.690296] env[62510]: DEBUG oslo_vmware.api [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Task: {'id': task-1769686, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.751788] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cb9cc2d5-96c2-4341-8f69-c98e4f9a192b tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1976.939934] env[62510]: INFO nova.compute.manager [-] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Took 1.03 seconds to deallocate network for instance. 
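
The PowerOffVM_Task records that follow (power off the VM, wait for the task, poll until it completes) are one instance of the invoke-then-poll pattern that recurs throughout this trace. A minimal sketch of that pattern with oslo.vmware, assuming an already-established oslo_vmware.api.VMwareAPISession ("session") and a VirtualMachine managed-object reference ("vm_ref"); this shows the generic library usage, not Nova's vm_util code.

    def power_off_vm(session, vm_ref):
        # Start the vCenter task (returns a Task managed-object reference),
        # then let the session poll it; wait_for_task() blocks until the task
        # reaches a terminal state and raises if it ends in error.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        return session.wait_for_task(task)
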
[ 1976.953532] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1976.953886] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9fe204bf-740e-4fb5-b90e-293f079f0f69 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.961999] env[62510]: DEBUG oslo_vmware.api [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for the task: (returnval){ [ 1976.961999] env[62510]: value = "task-1769687" [ 1976.961999] env[62510]: _type = "Task" [ 1976.961999] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1976.975052] env[62510]: DEBUG oslo_vmware.api [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769687, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1977.133775] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb3a216f-69a9-47a9-a544-95b9acbad3a4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.158544] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0582fdfa-79e0-412a-96dd-c89aa1b8eb40 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.166819] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Updating instance '14a54dac-d2b8-4618-86c8-ab2d08bae005' progress to 83 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1977.189875] env[62510]: DEBUG oslo_vmware.api [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Task: {'id': task-1769686, 'name': ReconfigVM_Task, 'duration_secs': 0.919977} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1977.190342] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Reconfigured VM instance instance-00000076 to attach disk [datastore1] bf62d0ce-c0e6-4a77-ab05-ac912ec5530f/bf62d0ce-c0e6-4a77-ab05-ac912ec5530f.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1977.191492] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-14850de9-2271-46c5-91fc-a89e0f875591 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.198590] env[62510]: DEBUG oslo_vmware.api [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Waiting for the task: (returnval){ [ 1977.198590] env[62510]: value = "task-1769688" [ 1977.198590] env[62510]: _type = "Task" [ 1977.198590] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1977.212137] env[62510]: DEBUG oslo_vmware.api [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Task: {'id': task-1769688, 'name': Rename_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1977.240725] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aaffb2e-db50-4b5a-88df-88be80ca3b0c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.250384] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-867a1d20-14a7-422a-ba8c-f1592901334f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.280605] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f406574-030c-40f7-a2e3-a8131a6dfa5a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.288840] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7c9e3c8-6ea1-49b2-97c6-9f0ff2b1aa5d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.302477] env[62510]: DEBUG nova.compute.provider_tree [None req-42b04c17-672f-405a-84d8-cea3054b0669 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1977.419947] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "f39e74c3-eb58-4d28-a489-73d2de1e9bef" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1977.420187] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "f39e74c3-eb58-4d28-a489-73d2de1e9bef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1977.447069] env[62510]: DEBUG oslo_concurrency.lockutils [None req-eb091f5b-2515-4a31-a0b0-eaa41c4d8d84 tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1977.471829] env[62510]: DEBUG oslo_vmware.api [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769687, 'name': PowerOffVM_Task, 'duration_secs': 0.24531} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1977.472101] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1977.472906] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b510ea23-d979-4a17-8e50-59732406ed73 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.492970] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5493302-b7ae-46db-9a23-570a37930d19 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.526620] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1977.526902] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b2540f07-eef2-4372-ac84-49af8d08cbc0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.534957] env[62510]: DEBUG oslo_vmware.api [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for the task: (returnval){ [ 1977.534957] env[62510]: value = "task-1769689" [ 1977.534957] env[62510]: _type = "Task" [ 1977.534957] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1977.542804] env[62510]: DEBUG oslo_vmware.api [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769689, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1977.674067] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1977.674343] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e3e8dc78-a727-41f9-aae6-b2396349071d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.684059] env[62510]: DEBUG oslo_vmware.api [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1977.684059] env[62510]: value = "task-1769690" [ 1977.684059] env[62510]: _type = "Task" [ 1977.684059] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1977.693703] env[62510]: DEBUG oslo_vmware.api [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769690, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1977.709904] env[62510]: DEBUG oslo_vmware.api [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Task: {'id': task-1769688, 'name': Rename_Task, 'duration_secs': 0.254709} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1977.710304] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1977.710652] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e879ff1c-639f-41ed-b390-0bbebd33b8a5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.719265] env[62510]: DEBUG oslo_vmware.api [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Waiting for the task: (returnval){ [ 1977.719265] env[62510]: value = "task-1769691" [ 1977.719265] env[62510]: _type = "Task" [ 1977.719265] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1977.728938] env[62510]: DEBUG oslo_vmware.api [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Task: {'id': task-1769691, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1977.806326] env[62510]: DEBUG nova.scheduler.client.report [None req-42b04c17-672f-405a-84d8-cea3054b0669 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1977.925889] env[62510]: DEBUG nova.compute.manager [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1978.050075] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] VM already powered off {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1978.050075] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1978.050075] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1978.050075] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1978.050075] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1978.050075] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ee4a71bb-2d2c-4bbf-9d44-4aff36c778a9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.059322] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1978.060377] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1978.061647] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23bd2a73-2d13-4538-a7eb-c4a97608597c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.071300] env[62510]: DEBUG oslo_vmware.api [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for the task: (returnval){ [ 1978.071300] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]523d3c3a-968e-aca3-74fa-d8b1e6431f55" [ 1978.071300] env[62510]: _type = "Task" [ 1978.071300] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1978.082288] env[62510]: DEBUG oslo_vmware.api [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]523d3c3a-968e-aca3-74fa-d8b1e6431f55, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1978.195739] env[62510]: DEBUG oslo_vmware.api [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769690, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1978.233664] env[62510]: DEBUG oslo_vmware.api [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Task: {'id': task-1769691, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1978.315141] env[62510]: DEBUG oslo_concurrency.lockutils [None req-42b04c17-672f-405a-84d8-cea3054b0669 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.848s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1978.318138] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cb9cc2d5-96c2-4341-8f69-c98e4f9a192b tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.566s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1978.318138] env[62510]: DEBUG nova.objects.instance [None req-cb9cc2d5-96c2-4341-8f69-c98e4f9a192b tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lazy-loading 'resources' on Instance uuid 11490e72-b9a5-4e8e-86c4-300c594cd914 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1978.344368] env[62510]: INFO nova.scheduler.client.report [None req-42b04c17-672f-405a-84d8-cea3054b0669 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Deleted allocations for instance cf4160a8-1160-45fc-b9e5-e9526b6c1506 [ 1978.448349] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1978.585086] env[62510]: DEBUG oslo_vmware.api [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]523d3c3a-968e-aca3-74fa-d8b1e6431f55, 'name': SearchDatastore_Task, 'duration_secs': 0.011616} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1978.586024] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-315cadc5-b220-4cfe-8604-76f5a477d26a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.591777] env[62510]: DEBUG oslo_vmware.api [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for the task: (returnval){ [ 1978.591777] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52a032e0-0546-5e81-f477-7a1bd6a94c72" [ 1978.591777] env[62510]: _type = "Task" [ 1978.591777] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1978.602090] env[62510]: DEBUG oslo_vmware.api [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52a032e0-0546-5e81-f477-7a1bd6a94c72, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1978.696413] env[62510]: DEBUG oslo_vmware.api [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769690, 'name': PowerOnVM_Task, 'duration_secs': 0.57418} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1978.696701] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1978.696890] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-24217692-413f-4e12-a3df-3b543e0822a8 tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Updating instance '14a54dac-d2b8-4618-86c8-ab2d08bae005' progress to 100 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1978.731034] env[62510]: DEBUG oslo_vmware.api [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Task: {'id': task-1769691, 'name': PowerOnVM_Task, 'duration_secs': 0.75482} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1978.731034] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1978.731176] env[62510]: INFO nova.compute.manager [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Took 8.15 seconds to spawn the instance on the hypervisor. 
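The PowerOnVM_Task records above (task invoked, "progress is N%" polling, "completed successfully") follow oslo.vmware's standard invoke-and-wait pattern. A minimal sketch of that pattern, assuming a placeholder vCenter endpoint, credentials and VM managed-object ID; this is illustrative, not the nova vmwareapi driver's exact code:

    # Illustrative sketch only: the invoke/wait pattern behind the
    # "Invoking VirtualMachine.PowerOnVM_Task", "Task: {...} progress is N%"
    # and "completed successfully" records above. The endpoint, credentials
    # and the 'vm-12345' moref are placeholders.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession('vcenter.example.org', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # Build a managed-object reference for the VM, start the power-on task,
    # then block while wait_for_task() polls the task and logs its progress.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)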
[ 1978.731288] env[62510]: DEBUG nova.compute.manager [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1978.732073] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2a83599-9860-4456-a8aa-ed2175de431a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.852194] env[62510]: DEBUG oslo_concurrency.lockutils [None req-42b04c17-672f-405a-84d8-cea3054b0669 tempest-AttachVolumeShelveTestJSON-1484778233 tempest-AttachVolumeShelveTestJSON-1484778233-project-member] Lock "cf4160a8-1160-45fc-b9e5-e9526b6c1506" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.454s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1979.055128] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd3193e7-d106-4dbf-8976-9b30f04f8a7c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.065851] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec7d2a2b-14c7-42f1-bfb7-81007c966add {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.100734] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfa07810-6f32-45f5-99cd-a521ae724cd5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.111527] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd234e74-9b8e-4d3f-9e41-78741de6d806 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.115579] env[62510]: DEBUG oslo_vmware.api [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52a032e0-0546-5e81-f477-7a1bd6a94c72, 'name': SearchDatastore_Task, 'duration_secs': 0.011642} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1979.115701] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1979.115955] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] abf15987-86cc-4fdc-be9a-efd0448ce9ca/645af513-c243-4722-b631-714f21477ae6-rescue.vmdk. 
{{(pid=62510) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1979.116714] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8d7e24c5-bafb-49e4-8ddb-7a22cf1cae5d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.127748] env[62510]: DEBUG nova.compute.provider_tree [None req-cb9cc2d5-96c2-4341-8f69-c98e4f9a192b tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1979.133794] env[62510]: DEBUG oslo_vmware.api [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for the task: (returnval){ [ 1979.133794] env[62510]: value = "task-1769692" [ 1979.133794] env[62510]: _type = "Task" [ 1979.133794] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1979.142301] env[62510]: DEBUG oslo_vmware.api [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769692, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1979.255320] env[62510]: INFO nova.compute.manager [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Took 13.08 seconds to build instance. [ 1979.631785] env[62510]: DEBUG nova.scheduler.client.report [None req-cb9cc2d5-96c2-4341-8f69-c98e4f9a192b tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1979.647926] env[62510]: DEBUG oslo_vmware.api [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769692, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1979.758187] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a6267cd3-48e6-42a5-8f0d-6bef5d366deb tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Lock "bf62d0ce-c0e6-4a77-ab05-ac912ec5530f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.600s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1980.142405] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cb9cc2d5-96c2-4341-8f69-c98e4f9a192b tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.825s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1980.146031] env[62510]: DEBUG oslo_concurrency.lockutils [None req-eb091f5b-2515-4a31-a0b0-eaa41c4d8d84 tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.699s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1980.146279] env[62510]: DEBUG nova.objects.instance [None req-eb091f5b-2515-4a31-a0b0-eaa41c4d8d84 tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Lazy-loading 'resources' on Instance uuid 7b2bcec4-6df7-4591-ac02-9da04d185756 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1980.153185] env[62510]: DEBUG oslo_vmware.api [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769692, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.609824} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1980.154024] env[62510]: INFO nova.virt.vmwareapi.ds_util [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] abf15987-86cc-4fdc-be9a-efd0448ce9ca/645af513-c243-4722-b631-714f21477ae6-rescue.vmdk. 
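The lock bookkeeping above ('Lock "compute_resources" acquired by ... :: waited N.NNNs', '"released" ... :: held N.NNNs') is emitted by oslo.concurrency's lockutils wrapper around the decorated callable. A minimal sketch of that usage, with a placeholder body standing in for the resource tracker's real accounting:

    # Illustrative sketch only: lockutils.synchronized serializes callers on the
    # named lock and logs the 'acquired by ... waited' / '"released" ... held'
    # lines seen above. The function body is a placeholder.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage(instance_uuid, vcpus, memory_mb):
        print('updating usage for %s: %d vCPU / %d MiB'
              % (instance_uuid, vcpus, memory_mb))

    # Instance UUID taken from the records above; the resource figures are
    # made-up example values.
    update_usage('11490e72-b9a5-4e8e-86c4-300c594cd914', 1, 512)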
[ 1980.154841] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb0c80b8-c03c-420e-a569-5d87790f856f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.192919] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Reconfiguring VM instance instance-00000075 to attach disk [datastore1] abf15987-86cc-4fdc-be9a-efd0448ce9ca/645af513-c243-4722-b631-714f21477ae6-rescue.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1980.194188] env[62510]: INFO nova.scheduler.client.report [None req-cb9cc2d5-96c2-4341-8f69-c98e4f9a192b tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Deleted allocations for instance 11490e72-b9a5-4e8e-86c4-300c594cd914 [ 1980.195467] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b8594af4-e5b2-4029-b7f3-6a1746b916e9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.223488] env[62510]: DEBUG oslo_vmware.api [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for the task: (returnval){ [ 1980.223488] env[62510]: value = "task-1769694" [ 1980.223488] env[62510]: _type = "Task" [ 1980.223488] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1980.232226] env[62510]: DEBUG oslo_vmware.api [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769694, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1980.338635] env[62510]: DEBUG oslo_concurrency.lockutils [None req-85d88707-1ca4-4dc3-a81c-fa8e5b5893da tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquiring lock "8a230335-6388-45fb-a29e-9e63ddb4d5f2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1980.338882] env[62510]: DEBUG oslo_concurrency.lockutils [None req-85d88707-1ca4-4dc3-a81c-fa8e5b5893da tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lock "8a230335-6388-45fb-a29e-9e63ddb4d5f2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1980.339102] env[62510]: DEBUG oslo_concurrency.lockutils [None req-85d88707-1ca4-4dc3-a81c-fa8e5b5893da tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquiring lock "8a230335-6388-45fb-a29e-9e63ddb4d5f2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1980.339301] env[62510]: DEBUG oslo_concurrency.lockutils [None req-85d88707-1ca4-4dc3-a81c-fa8e5b5893da tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lock "8a230335-6388-45fb-a29e-9e63ddb4d5f2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1980.339508] env[62510]: DEBUG oslo_concurrency.lockutils [None req-85d88707-1ca4-4dc3-a81c-fa8e5b5893da tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lock "8a230335-6388-45fb-a29e-9e63ddb4d5f2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1980.341510] env[62510]: INFO nova.compute.manager [None req-85d88707-1ca4-4dc3-a81c-fa8e5b5893da tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Terminating instance [ 1980.720148] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cb9cc2d5-96c2-4341-8f69-c98e4f9a192b tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "11490e72-b9a5-4e8e-86c4-300c594cd914" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.652s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1980.735442] env[62510]: DEBUG oslo_vmware.api [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769694, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1980.796447] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "14a54dac-d2b8-4618-86c8-ab2d08bae005" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1980.796755] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "14a54dac-d2b8-4618-86c8-ab2d08bae005" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1980.796954] env[62510]: DEBUG nova.compute.manager [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Going to confirm migration 6 {{(pid=62510) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5157}} [ 1980.848919] env[62510]: DEBUG nova.compute.manager [None req-85d88707-1ca4-4dc3-a81c-fa8e5b5893da tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1980.849290] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-85d88707-1ca4-4dc3-a81c-fa8e5b5893da tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1980.852778] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-267c242b-bec3-4666-853b-e49228ebe2ab {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.861521] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-85d88707-1ca4-4dc3-a81c-fa8e5b5893da tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1980.861843] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-56634e85-4cc3-4176-9943-9038a2496172 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.868479] env[62510]: DEBUG oslo_vmware.api [None req-85d88707-1ca4-4dc3-a81c-fa8e5b5893da tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1980.868479] env[62510]: value = "task-1769695" [ 1980.868479] env[62510]: _type = "Task" [ 1980.868479] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1980.876706] env[62510]: DEBUG oslo_vmware.api [None req-85d88707-1ca4-4dc3-a81c-fa8e5b5893da tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769695, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1980.878411] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-420e83c9-634b-41e4-bf58-4b6d0abe8225 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.885785] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a460ddfc-5176-4081-9c5b-c17ea7877962 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.920141] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e5fe9d9-7f7b-47ed-9465-4dd099d6a7c9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.928377] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6e31923-3c04-4e49-a9d0-4338255bddc4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.945344] env[62510]: DEBUG nova.compute.provider_tree [None req-eb091f5b-2515-4a31-a0b0-eaa41c4d8d84 tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1981.009819] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9fd84bf1-add9-444f-a593-0022cc055b21 tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Acquiring lock "bf62d0ce-c0e6-4a77-ab05-ac912ec5530f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1981.010115] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9fd84bf1-add9-444f-a593-0022cc055b21 tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Lock "bf62d0ce-c0e6-4a77-ab05-ac912ec5530f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1981.010355] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9fd84bf1-add9-444f-a593-0022cc055b21 tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Acquiring lock "bf62d0ce-c0e6-4a77-ab05-ac912ec5530f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1981.010551] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9fd84bf1-add9-444f-a593-0022cc055b21 tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Lock 
"bf62d0ce-c0e6-4a77-ab05-ac912ec5530f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1981.010847] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9fd84bf1-add9-444f-a593-0022cc055b21 tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Lock "bf62d0ce-c0e6-4a77-ab05-ac912ec5530f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1981.013033] env[62510]: INFO nova.compute.manager [None req-9fd84bf1-add9-444f-a593-0022cc055b21 tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Terminating instance [ 1981.236065] env[62510]: DEBUG oslo_vmware.api [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769694, 'name': ReconfigVM_Task, 'duration_secs': 0.78699} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1981.236522] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Reconfigured VM instance instance-00000075 to attach disk [datastore1] abf15987-86cc-4fdc-be9a-efd0448ce9ca/645af513-c243-4722-b631-714f21477ae6-rescue.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1981.237356] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dab9e58f-cdec-4f1d-bae4-2b0611a96781 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.264362] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f87aa5fd-a9f5-4f03-808a-eb8118d9b68b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.279776] env[62510]: DEBUG oslo_vmware.api [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for the task: (returnval){ [ 1981.279776] env[62510]: value = "task-1769696" [ 1981.279776] env[62510]: _type = "Task" [ 1981.279776] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1981.288651] env[62510]: DEBUG oslo_vmware.api [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769696, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1981.334930] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "refresh_cache-14a54dac-d2b8-4618-86c8-ab2d08bae005" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1981.335092] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquired lock "refresh_cache-14a54dac-d2b8-4618-86c8-ab2d08bae005" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1981.335250] env[62510]: DEBUG nova.network.neutron [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1981.335438] env[62510]: DEBUG nova.objects.instance [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lazy-loading 'info_cache' on Instance uuid 14a54dac-d2b8-4618-86c8-ab2d08bae005 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1981.378803] env[62510]: DEBUG oslo_vmware.api [None req-85d88707-1ca4-4dc3-a81c-fa8e5b5893da tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769695, 'name': PowerOffVM_Task, 'duration_secs': 0.295677} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1981.379200] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-85d88707-1ca4-4dc3-a81c-fa8e5b5893da tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1981.379450] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-85d88707-1ca4-4dc3-a81c-fa8e5b5893da tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1981.379809] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-faa7b20b-5b10-4345-b25a-70ea25e320af {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.448975] env[62510]: DEBUG nova.scheduler.client.report [None req-eb091f5b-2515-4a31-a0b0-eaa41c4d8d84 tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1981.453647] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-85d88707-1ca4-4dc3-a81c-fa8e5b5893da tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1981.453856] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-85d88707-1ca4-4dc3-a81c-fa8e5b5893da tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1981.454049] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-85d88707-1ca4-4dc3-a81c-fa8e5b5893da tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Deleting the datastore file [datastore1] 8a230335-6388-45fb-a29e-9e63ddb4d5f2 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1981.454533] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7fa791d8-c902-4735-bd43-b52b18a0984c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.461071] env[62510]: DEBUG oslo_vmware.api [None req-85d88707-1ca4-4dc3-a81c-fa8e5b5893da tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1981.461071] env[62510]: value 
= "task-1769698" [ 1981.461071] env[62510]: _type = "Task" [ 1981.461071] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1981.470347] env[62510]: DEBUG oslo_vmware.api [None req-85d88707-1ca4-4dc3-a81c-fa8e5b5893da tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769698, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1981.498165] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0335bcc0-601b-4f15-adbf-a2fbd721aaad tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquiring lock "6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1981.498502] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0335bcc0-601b-4f15-adbf-a2fbd721aaad tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1981.518876] env[62510]: DEBUG nova.compute.manager [None req-9fd84bf1-add9-444f-a593-0022cc055b21 tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1981.519211] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd84bf1-add9-444f-a593-0022cc055b21 tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1981.520428] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb9c361d-f1ed-4957-8a3a-5a1ab099c213 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.531303] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fd84bf1-add9-444f-a593-0022cc055b21 tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1981.531551] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ed52158b-1348-4083-9605-ebbf0382db7f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.537883] env[62510]: DEBUG oslo_vmware.api [None req-9fd84bf1-add9-444f-a593-0022cc055b21 tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Waiting for the task: (returnval){ [ 1981.537883] env[62510]: value = "task-1769699" [ 1981.537883] env[62510]: _type = "Task" [ 1981.537883] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1981.546107] env[62510]: DEBUG oslo_vmware.api [None req-9fd84bf1-add9-444f-a593-0022cc055b21 tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Task: {'id': task-1769699, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1981.789624] env[62510]: DEBUG oslo_vmware.api [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769696, 'name': ReconfigVM_Task, 'duration_secs': 0.304671} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1981.789922] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1981.790191] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d483efc9-a1e8-42ec-bac5-ab72bb60542a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.796904] env[62510]: DEBUG oslo_vmware.api [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for the task: (returnval){ [ 1981.796904] env[62510]: value = "task-1769700" [ 1981.796904] env[62510]: _type = "Task" [ 1981.796904] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1981.804589] env[62510]: DEBUG oslo_vmware.api [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769700, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1981.955778] env[62510]: DEBUG oslo_concurrency.lockutils [None req-eb091f5b-2515-4a31-a0b0-eaa41c4d8d84 tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.810s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1981.959061] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.510s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1981.960645] env[62510]: INFO nova.compute.claims [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1981.972834] env[62510]: DEBUG oslo_vmware.api [None req-85d88707-1ca4-4dc3-a81c-fa8e5b5893da tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769698, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.430816} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1981.973574] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-85d88707-1ca4-4dc3-a81c-fa8e5b5893da tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1981.973574] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-85d88707-1ca4-4dc3-a81c-fa8e5b5893da tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1981.973574] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-85d88707-1ca4-4dc3-a81c-fa8e5b5893da tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1981.973702] env[62510]: INFO nova.compute.manager [None req-85d88707-1ca4-4dc3-a81c-fa8e5b5893da tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1981.973982] env[62510]: DEBUG oslo.service.loopingcall [None req-85d88707-1ca4-4dc3-a81c-fa8e5b5893da tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1981.974210] env[62510]: DEBUG nova.compute.manager [-] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1981.974441] env[62510]: DEBUG nova.network.neutron [-] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1981.980221] env[62510]: INFO nova.scheduler.client.report [None req-eb091f5b-2515-4a31-a0b0-eaa41c4d8d84 tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Deleted allocations for instance 7b2bcec4-6df7-4591-ac02-9da04d185756 [ 1982.001527] env[62510]: INFO nova.compute.manager [None req-0335bcc0-601b-4f15-adbf-a2fbd721aaad tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Detaching volume 1d070b5e-f3a1-4f0d-9e31-9e75d1f1ee0a [ 1982.044573] env[62510]: INFO nova.virt.block_device [None req-0335bcc0-601b-4f15-adbf-a2fbd721aaad tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Attempting to driver detach volume 1d070b5e-f3a1-4f0d-9e31-9e75d1f1ee0a from mountpoint /dev/sdb [ 1982.044816] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-0335bcc0-601b-4f15-adbf-a2fbd721aaad tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Volume detach. Driver type: vmdk {{(pid=62510) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1982.045036] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-0335bcc0-601b-4f15-adbf-a2fbd721aaad tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367479', 'volume_id': '1d070b5e-f3a1-4f0d-9e31-9e75d1f1ee0a', 'name': 'volume-1d070b5e-f3a1-4f0d-9e31-9e75d1f1ee0a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3', 'attached_at': '', 'detached_at': '', 'volume_id': '1d070b5e-f3a1-4f0d-9e31-9e75d1f1ee0a', 'serial': '1d070b5e-f3a1-4f0d-9e31-9e75d1f1ee0a'} {{(pid=62510) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1982.045933] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d2a45b5-ec8c-4556-af9c-c528b8c6a18b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.052145] env[62510]: DEBUG oslo_vmware.api [None req-9fd84bf1-add9-444f-a593-0022cc055b21 tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Task: {'id': task-1769699, 'name': PowerOffVM_Task, 'duration_secs': 0.368435} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1982.052843] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fd84bf1-add9-444f-a593-0022cc055b21 tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1982.053058] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd84bf1-add9-444f-a593-0022cc055b21 tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1982.053387] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ea23badc-8c33-460c-a6f8-007ee87341aa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.072092] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34f960f7-f965-4052-a2ca-4c34e4bb3a52 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.078901] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a2e5395-9e73-494e-99d1-a1a4ec21f463 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.100636] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f6ff69e-d74e-4a7a-ad30-368630db8c21 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.116683] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-0335bcc0-601b-4f15-adbf-a2fbd721aaad tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] The volume has not been displaced from its original location: [datastore1] volume-1d070b5e-f3a1-4f0d-9e31-9e75d1f1ee0a/volume-1d070b5e-f3a1-4f0d-9e31-9e75d1f1ee0a.vmdk. No consolidation needed. {{(pid=62510) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1982.123044] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-0335bcc0-601b-4f15-adbf-a2fbd721aaad tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Reconfiguring VM instance instance-00000062 to detach disk 2001 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1982.123044] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b2cdb0c8-1e4e-4bf3-a5e0-34f0c51ea7e1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.142366] env[62510]: DEBUG oslo_vmware.api [None req-0335bcc0-601b-4f15-adbf-a2fbd721aaad tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 1982.142366] env[62510]: value = "task-1769702" [ 1982.142366] env[62510]: _type = "Task" [ 1982.142366] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1982.147180] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd84bf1-add9-444f-a593-0022cc055b21 tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1982.147848] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd84bf1-add9-444f-a593-0022cc055b21 tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1982.147848] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fd84bf1-add9-444f-a593-0022cc055b21 tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Deleting the datastore file [datastore1] bf62d0ce-c0e6-4a77-ab05-ac912ec5530f {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1982.148226] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ee84011c-86c2-4a40-a4b5-f29e833053be {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.154024] env[62510]: DEBUG oslo_vmware.api [None req-0335bcc0-601b-4f15-adbf-a2fbd721aaad tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769702, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1982.157487] env[62510]: DEBUG oslo_vmware.api [None req-9fd84bf1-add9-444f-a593-0022cc055b21 tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Waiting for the task: (returnval){ [ 1982.157487] env[62510]: value = "task-1769703" [ 1982.157487] env[62510]: _type = "Task" [ 1982.157487] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1982.165016] env[62510]: DEBUG oslo_vmware.api [None req-9fd84bf1-add9-444f-a593-0022cc055b21 tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Task: {'id': task-1769703, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1982.307177] env[62510]: DEBUG oslo_vmware.api [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769700, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1982.355090] env[62510]: DEBUG nova.compute.manager [req-39f4d2ff-aadb-496e-8115-c6706d6fad11 req-3c993e39-6c5d-40f9-b87b-ba929e76a42e service nova] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Received event network-vif-deleted-a0b771a0-7aa2-49f0-9945-9956c4260b99 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1982.355326] env[62510]: INFO nova.compute.manager [req-39f4d2ff-aadb-496e-8115-c6706d6fad11 req-3c993e39-6c5d-40f9-b87b-ba929e76a42e service nova] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Neutron deleted interface a0b771a0-7aa2-49f0-9945-9956c4260b99; detaching it from the instance and deleting it from the info cache [ 1982.355677] env[62510]: DEBUG nova.network.neutron [req-39f4d2ff-aadb-496e-8115-c6706d6fad11 req-3c993e39-6c5d-40f9-b87b-ba929e76a42e service nova] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1982.490034] env[62510]: DEBUG oslo_concurrency.lockutils [None req-eb091f5b-2515-4a31-a0b0-eaa41c4d8d84 tempest-ServerShowV257Test-213078519 tempest-ServerShowV257Test-213078519-project-member] Lock "7b2bcec4-6df7-4591-ac02-9da04d185756" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.788s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1982.550088] env[62510]: DEBUG nova.network.neutron [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Updating instance_info_cache with network_info: [{"id": "9015bc32-b9ad-4846-a019-0a10e61e5218", "address": "fa:16:3e:1a:36:04", "network": {"id": "de9186ec-ac4f-4ac0-8499-037f92e28197", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-164983974-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f878b652f01c48139bfc6996e5e32f5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "191a5351-07d5-4138-b855-206f48fc4375", "external-id": "nsx-vlan-transportzone-939", "segmentation_id": 939, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9015bc32-b9", "ovs_interfaceid": "9015bc32-b9ad-4846-a019-0a10e61e5218", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1982.655056] env[62510]: DEBUG oslo_vmware.api [None req-0335bcc0-601b-4f15-adbf-a2fbd721aaad tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769702, 'name': ReconfigVM_Task, 'duration_secs': 0.376091} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1982.655455] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-0335bcc0-601b-4f15-adbf-a2fbd721aaad tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Reconfigured VM instance instance-00000062 to detach disk 2001 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1982.661079] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9955f282-ee1a-43ec-9551-d7a7988c60a6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.680745] env[62510]: DEBUG oslo_vmware.api [None req-9fd84bf1-add9-444f-a593-0022cc055b21 tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Task: {'id': task-1769703, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.348596} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1982.682027] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fd84bf1-add9-444f-a593-0022cc055b21 tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1982.682279] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd84bf1-add9-444f-a593-0022cc055b21 tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1982.682443] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd84bf1-add9-444f-a593-0022cc055b21 tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1982.682658] env[62510]: INFO nova.compute.manager [None req-9fd84bf1-add9-444f-a593-0022cc055b21 tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1982.682920] env[62510]: DEBUG oslo.service.loopingcall [None req-9fd84bf1-add9-444f-a593-0022cc055b21 tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1982.683192] env[62510]: DEBUG oslo_vmware.api [None req-0335bcc0-601b-4f15-adbf-a2fbd721aaad tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 1982.683192] env[62510]: value = "task-1769704" [ 1982.683192] env[62510]: _type = "Task" [ 1982.683192] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1982.683433] env[62510]: DEBUG nova.compute.manager [-] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1982.683560] env[62510]: DEBUG nova.network.neutron [-] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1982.693562] env[62510]: DEBUG oslo_vmware.api [None req-0335bcc0-601b-4f15-adbf-a2fbd721aaad tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769704, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1982.745175] env[62510]: DEBUG nova.network.neutron [-] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1982.811586] env[62510]: DEBUG oslo_vmware.api [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769700, 'name': PowerOnVM_Task, 'duration_secs': 0.529389} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1982.811711] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1982.814472] env[62510]: DEBUG nova.compute.manager [None req-4bfcbe74-0c15-45fd-b170-a72078cb4246 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1982.815289] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-056b9d85-fe62-40ee-9756-bede20a34906 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.857609] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2cd0db7e-e6e9-4d84-ba90-a8139c338886 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.869188] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e20742a-fd50-496c-a365-2f5e5914191b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.906306] env[62510]: DEBUG nova.compute.manager [req-39f4d2ff-aadb-496e-8115-c6706d6fad11 req-3c993e39-6c5d-40f9-b87b-ba929e76a42e service nova] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Detach interface failed, port_id=a0b771a0-7aa2-49f0-9945-9956c4260b99, reason: Instance 8a230335-6388-45fb-a29e-9e63ddb4d5f2 could not be found. 
{{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1983.052776] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Releasing lock "refresh_cache-14a54dac-d2b8-4618-86c8-ab2d08bae005" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1983.053061] env[62510]: DEBUG nova.objects.instance [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lazy-loading 'migration_context' on Instance uuid 14a54dac-d2b8-4618-86c8-ab2d08bae005 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1983.172938] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-644074c3-3471-4f1d-87a7-1e80fe8b7f35 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.182210] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37e19349-1808-434a-84a7-9abcf0ce5256 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.215176] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-244b8916-e0e6-421c-8934-f96f768434b0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.221082] env[62510]: DEBUG oslo_vmware.api [None req-0335bcc0-601b-4f15-adbf-a2fbd721aaad tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769704, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1983.225664] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58bf5fb0-9062-405e-8b05-4f0a37c50014 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.239206] env[62510]: DEBUG nova.compute.provider_tree [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1983.247244] env[62510]: INFO nova.compute.manager [-] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Took 1.27 seconds to deallocate network for instance. 
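The entries around this point repeat the same vCenter interaction: a *_Task SOAP method is invoked (PowerOffVM_Task, ReconfigVM_Task, DeleteDatastoreFile_Task), a task reference such as task-1769700 comes back immediately, and oslo.vmware's wait_for_task then polls it, producing the "_poll_task ... progress is N%" lines until "completed successfully". A minimal Python sketch of that invoke-then-poll pattern, assuming an already-created oslo.vmware VMwareAPISession like the one established during session setup; power_off_vm and vm_ref are illustrative stand-ins, not Nova's actual vm_util helpers:

from oslo_vmware import api


def power_off_vm(session: api.VMwareAPISession, vm_ref):
    """Invoke PowerOffVM_Task and block until vCenter reports completion."""
    # invoke_api sends the SOAP request through the session's Vim client;
    # a *_Task method returns a task managed-object reference right away.
    task_ref = session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref)
    # wait_for_task polls the task (the "progress is N%" entries in this
    # log) and raises if the vCenter task ends in error.
    return session.wait_for_task(task_ref)

The same wait_for_task call underlies every "Waiting for the task: (returnval){...}" block in this log; it is the polling loop logged from api.py:397/434 above.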
[ 1983.503912] env[62510]: DEBUG nova.network.neutron [-] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1983.556505] env[62510]: DEBUG nova.objects.base [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Object Instance<14a54dac-d2b8-4618-86c8-ab2d08bae005> lazy-loaded attributes: info_cache,migration_context {{(pid=62510) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1983.557451] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b045142-4442-41dc-a5dc-9ac5cbdd5816 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.577895] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83018361-7922-48a7-a898-64af3d41fd77 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.584863] env[62510]: DEBUG oslo_vmware.api [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1983.584863] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]524f50d9-a5d5-0858-2822-e7bf29c302a9" [ 1983.584863] env[62510]: _type = "Task" [ 1983.584863] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1983.593634] env[62510]: DEBUG oslo_vmware.api [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]524f50d9-a5d5-0858-2822-e7bf29c302a9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1983.696432] env[62510]: DEBUG oslo_vmware.api [None req-0335bcc0-601b-4f15-adbf-a2fbd721aaad tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769704, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1983.742654] env[62510]: DEBUG nova.scheduler.client.report [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1983.753249] env[62510]: DEBUG oslo_concurrency.lockutils [None req-85d88707-1ca4-4dc3-a81c-fa8e5b5893da tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1984.006329] env[62510]: INFO nova.compute.manager [-] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Took 1.32 seconds to deallocate network for instance. [ 1984.095724] env[62510]: DEBUG oslo_vmware.api [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]524f50d9-a5d5-0858-2822-e7bf29c302a9, 'name': SearchDatastore_Task, 'duration_secs': 0.014325} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1984.096025] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1984.197737] env[62510]: DEBUG oslo_vmware.api [None req-0335bcc0-601b-4f15-adbf-a2fbd721aaad tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769704, 'name': ReconfigVM_Task, 'duration_secs': 1.183593} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1984.198112] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-0335bcc0-601b-4f15-adbf-a2fbd721aaad tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367479', 'volume_id': '1d070b5e-f3a1-4f0d-9e31-9e75d1f1ee0a', 'name': 'volume-1d070b5e-f3a1-4f0d-9e31-9e75d1f1ee0a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3', 'attached_at': '', 'detached_at': '', 'volume_id': '1d070b5e-f3a1-4f0d-9e31-9e75d1f1ee0a', 'serial': '1d070b5e-f3a1-4f0d-9e31-9e75d1f1ee0a'} {{(pid=62510) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1984.250058] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.291s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1984.254085] env[62510]: DEBUG nova.compute.manager [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1984.255438] env[62510]: DEBUG oslo_concurrency.lockutils [None req-85d88707-1ca4-4dc3-a81c-fa8e5b5893da tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.502s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1984.255764] env[62510]: DEBUG nova.objects.instance [None req-85d88707-1ca4-4dc3-a81c-fa8e5b5893da tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lazy-loading 'resources' on Instance uuid 8a230335-6388-45fb-a29e-9e63ddb4d5f2 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1984.449180] env[62510]: DEBUG nova.compute.manager [req-0e4f48c6-dac8-49dc-a977-57fd3c859c2a req-64441e0d-2a34-4132-9a68-99637b476b08 service nova] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Received event network-vif-deleted-96d4b1d9-73b7-4db5-b298-714b7378bedd {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1984.513496] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9fd84bf1-add9-444f-a593-0022cc055b21 tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1984.539668] env[62510]: INFO nova.compute.manager [None req-074d1bbf-5157-4feb-bcf4-50a504a98d3b tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: 
abf15987-86cc-4fdc-be9a-efd0448ce9ca] Unrescuing [ 1984.539933] env[62510]: DEBUG oslo_concurrency.lockutils [None req-074d1bbf-5157-4feb-bcf4-50a504a98d3b tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Acquiring lock "refresh_cache-abf15987-86cc-4fdc-be9a-efd0448ce9ca" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1984.540098] env[62510]: DEBUG oslo_concurrency.lockutils [None req-074d1bbf-5157-4feb-bcf4-50a504a98d3b tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Acquired lock "refresh_cache-abf15987-86cc-4fdc-be9a-efd0448ce9ca" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1984.540270] env[62510]: DEBUG nova.network.neutron [None req-074d1bbf-5157-4feb-bcf4-50a504a98d3b tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1984.748100] env[62510]: DEBUG nova.objects.instance [None req-0335bcc0-601b-4f15-adbf-a2fbd721aaad tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lazy-loading 'flavor' on Instance uuid 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1984.759487] env[62510]: DEBUG nova.compute.utils [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1984.763526] env[62510]: DEBUG nova.compute.manager [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1984.763750] env[62510]: DEBUG nova.network.neutron [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1984.816538] env[62510]: DEBUG nova.policy [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6decc076b3da4d1b86c6aa73f1cf2674', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '86abf24d608d4c438161dc0b8335dea1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 1984.981653] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00e715ae-1f11-49a9-9639-ff21e670c3ac {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.990366] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52ad7f25-ef37-42ca-9616-73327ee3a5ec {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.027756] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33aab3b5-3d1c-4458-95ba-6f5a11271eaa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.035698] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9525a856-7fb0-416c-a709-847a85879670 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.053546] env[62510]: DEBUG nova.compute.provider_tree [None req-85d88707-1ca4-4dc3-a81c-fa8e5b5893da tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1985.134654] env[62510]: DEBUG nova.network.neutron [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Successfully created port: 453df25e-58eb-42b3-aa0a-3771b21d6b25 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1985.264359] env[62510]: DEBUG nova.compute.manager [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Start building block device mappings for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1985.355407] env[62510]: DEBUG nova.network.neutron [None req-074d1bbf-5157-4feb-bcf4-50a504a98d3b tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Updating instance_info_cache with network_info: [{"id": "f4bf4471-4bf0-485f-80a8-2548fbf3e100", "address": "fa:16:3e:e2:41:3d", "network": {"id": "dfd8964c-0225-4df4-815d-ef7af9be1790", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2123884413-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "fa6ed026a1264d02abe75467127bae99", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fd77ecbc-aaaf-45f4-ae8f-977d90e4052f", "external-id": "nsx-vlan-transportzone-171", "segmentation_id": 171, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4bf4471-4b", "ovs_interfaceid": "f4bf4471-4bf0-485f-80a8-2548fbf3e100", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1985.556374] env[62510]: DEBUG nova.scheduler.client.report [None req-85d88707-1ca4-4dc3-a81c-fa8e5b5893da tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1985.761107] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0335bcc0-601b-4f15-adbf-a2fbd721aaad tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.262s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1985.777539] env[62510]: INFO nova.virt.block_device [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Booting with volume 5f4e1cdd-b5a6-4d73-9faf-1b45c89d6de8 at /dev/sda [ 1985.812383] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c208ad55-cb6b-4e87-aad4-f0777d9ef560 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.823423] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-63c6a2e7-10fa-4fd5-b2d3-8054ae4350a0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.855375] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-463b264a-1869-4f38-8252-4a385a0adf42 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.857771] env[62510]: DEBUG oslo_concurrency.lockutils [None req-074d1bbf-5157-4feb-bcf4-50a504a98d3b tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Releasing lock "refresh_cache-abf15987-86cc-4fdc-be9a-efd0448ce9ca" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1985.858454] env[62510]: DEBUG nova.objects.instance [None req-074d1bbf-5157-4feb-bcf4-50a504a98d3b tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Lazy-loading 'flavor' on Instance uuid abf15987-86cc-4fdc-be9a-efd0448ce9ca {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1985.867828] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64cf3f80-4f53-4eba-b408-9b757b23249e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.903010] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8a74870-81d2-4513-b3a9-e14564dea533 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.909613] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd298051-58f5-4e7f-b13f-49d7ea495d68 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.922991] env[62510]: DEBUG nova.virt.block_device [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Updating existing volume attachment record: 34979739-3502-4363-9703-fdf4a76549b8 {{(pid=62510) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1986.061271] env[62510]: DEBUG oslo_concurrency.lockutils [None req-85d88707-1ca4-4dc3-a81c-fa8e5b5893da tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.806s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1986.063742] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 1.967s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1986.084482] env[62510]: INFO nova.scheduler.client.report [None req-85d88707-1ca4-4dc3-a81c-fa8e5b5893da tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Deleted allocations for instance 8a230335-6388-45fb-a29e-9e63ddb4d5f2 [ 
1986.210391] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquiring lock "6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1986.210391] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1986.210627] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquiring lock "6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1986.210859] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1986.211060] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1986.213230] env[62510]: INFO nova.compute.manager [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Terminating instance [ 1986.364604] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-864bb5b3-4f7a-462e-87d7-9a576886be1f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.386657] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-074d1bbf-5157-4feb-bcf4-50a504a98d3b tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1986.386963] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-43603293-319c-4b73-9809-dcd0b160d404 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.393665] env[62510]: DEBUG oslo_vmware.api [None 
req-074d1bbf-5157-4feb-bcf4-50a504a98d3b tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for the task: (returnval){ [ 1986.393665] env[62510]: value = "task-1769705" [ 1986.393665] env[62510]: _type = "Task" [ 1986.393665] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1986.401702] env[62510]: DEBUG oslo_vmware.api [None req-074d1bbf-5157-4feb-bcf4-50a504a98d3b tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769705, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1986.595395] env[62510]: DEBUG oslo_concurrency.lockutils [None req-85d88707-1ca4-4dc3-a81c-fa8e5b5893da tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lock "8a230335-6388-45fb-a29e-9e63ddb4d5f2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.256s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1986.717044] env[62510]: DEBUG nova.compute.manager [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1986.718469] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1986.718469] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0fe3be8-8ea0-47ad-b331-9040853652a7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.730009] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1986.730305] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ce322621-f36c-4408-816f-3b4713aef460 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.736404] env[62510]: DEBUG oslo_vmware.api [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 1986.736404] env[62510]: value = "task-1769706" [ 1986.736404] env[62510]: _type = "Task" [ 1986.736404] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1986.747859] env[62510]: DEBUG oslo_vmware.api [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769706, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1986.765910] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-626427c2-e6f4-44fd-9afd-440b8919f66d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.773158] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd77243c-fe17-42b1-bfba-7495c2444d50 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.804893] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c71460a3-29fc-4f4d-b3a1-cc433a084803 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.812981] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc8eec8b-5947-4d07-803e-78ab7d00a376 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.829024] env[62510]: DEBUG nova.compute.provider_tree [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1986.834195] env[62510]: DEBUG nova.network.neutron [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Successfully updated port: 453df25e-58eb-42b3-aa0a-3771b21d6b25 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1986.902466] env[62510]: DEBUG nova.compute.manager [req-fc961753-eed3-485b-a83d-ebfe98f8de21 req-75911b06-0230-463e-aece-5542521f7167 service nova] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Received event network-vif-plugged-453df25e-58eb-42b3-aa0a-3771b21d6b25 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1986.902691] env[62510]: DEBUG oslo_concurrency.lockutils [req-fc961753-eed3-485b-a83d-ebfe98f8de21 req-75911b06-0230-463e-aece-5542521f7167 service nova] Acquiring lock "f39e74c3-eb58-4d28-a489-73d2de1e9bef-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1986.902887] env[62510]: DEBUG oslo_concurrency.lockutils [req-fc961753-eed3-485b-a83d-ebfe98f8de21 req-75911b06-0230-463e-aece-5542521f7167 service nova] Lock "f39e74c3-eb58-4d28-a489-73d2de1e9bef-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1986.903064] env[62510]: DEBUG oslo_concurrency.lockutils 
[req-fc961753-eed3-485b-a83d-ebfe98f8de21 req-75911b06-0230-463e-aece-5542521f7167 service nova] Lock "f39e74c3-eb58-4d28-a489-73d2de1e9bef-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1986.903274] env[62510]: DEBUG nova.compute.manager [req-fc961753-eed3-485b-a83d-ebfe98f8de21 req-75911b06-0230-463e-aece-5542521f7167 service nova] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] No waiting events found dispatching network-vif-plugged-453df25e-58eb-42b3-aa0a-3771b21d6b25 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1986.903377] env[62510]: WARNING nova.compute.manager [req-fc961753-eed3-485b-a83d-ebfe98f8de21 req-75911b06-0230-463e-aece-5542521f7167 service nova] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Received unexpected event network-vif-plugged-453df25e-58eb-42b3-aa0a-3771b21d6b25 for instance with vm_state building and task_state block_device_mapping. [ 1986.906921] env[62510]: DEBUG oslo_vmware.api [None req-074d1bbf-5157-4feb-bcf4-50a504a98d3b tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769705, 'name': PowerOffVM_Task, 'duration_secs': 0.232031} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1986.907773] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-074d1bbf-5157-4feb-bcf4-50a504a98d3b tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1986.913845] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-074d1bbf-5157-4feb-bcf4-50a504a98d3b tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Reconfiguring VM instance instance-00000075 to detach disk 2001 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1986.914719] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ff4ec8fc-e476-472c-b5ee-0cf426503f09 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.934220] env[62510]: DEBUG oslo_vmware.api [None req-074d1bbf-5157-4feb-bcf4-50a504a98d3b tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for the task: (returnval){ [ 1986.934220] env[62510]: value = "task-1769707" [ 1986.934220] env[62510]: _type = "Task" [ 1986.934220] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1986.942884] env[62510]: DEBUG oslo_vmware.api [None req-074d1bbf-5157-4feb-bcf4-50a504a98d3b tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769707, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1987.076917] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a195657e-a0cb-4152-be28-05685344d35b tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquiring lock "2f7b02e8-f658-448f-b6e6-9bfa94c74da4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1987.077177] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a195657e-a0cb-4152-be28-05685344d35b tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lock "2f7b02e8-f658-448f-b6e6-9bfa94c74da4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1987.077417] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a195657e-a0cb-4152-be28-05685344d35b tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquiring lock "2f7b02e8-f658-448f-b6e6-9bfa94c74da4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1987.077616] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a195657e-a0cb-4152-be28-05685344d35b tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lock "2f7b02e8-f658-448f-b6e6-9bfa94c74da4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1987.077800] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a195657e-a0cb-4152-be28-05685344d35b tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lock "2f7b02e8-f658-448f-b6e6-9bfa94c74da4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1987.080102] env[62510]: INFO nova.compute.manager [None req-a195657e-a0cb-4152-be28-05685344d35b tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Terminating instance [ 1987.181674] env[62510]: DEBUG oslo_concurrency.lockutils [None req-22b39d3b-b224-4609-bd8b-8a9da5202f11 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "22002fc1-647e-4e65-a5f0-c3a34575985f" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1987.181907] env[62510]: DEBUG oslo_concurrency.lockutils [None req-22b39d3b-b224-4609-bd8b-8a9da5202f11 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "22002fc1-647e-4e65-a5f0-c3a34575985f" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: 
waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1987.246247] env[62510]: DEBUG oslo_vmware.api [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769706, 'name': PowerOffVM_Task, 'duration_secs': 0.239683} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1987.246526] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1987.246712] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1987.246958] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2def682b-5720-4e61-871d-72908e560520 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.329484] env[62510]: DEBUG nova.scheduler.client.report [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1987.439864] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "refresh_cache-f39e74c3-eb58-4d28-a489-73d2de1e9bef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1987.440026] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquired lock "refresh_cache-f39e74c3-eb58-4d28-a489-73d2de1e9bef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1987.440160] env[62510]: DEBUG nova.network.neutron [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1987.446806] env[62510]: DEBUG oslo_vmware.api [None req-074d1bbf-5157-4feb-bcf4-50a504a98d3b tempest-ServerRescueTestJSON-1013553629 
tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769707, 'name': ReconfigVM_Task, 'duration_secs': 0.207236} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1987.447566] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-074d1bbf-5157-4feb-bcf4-50a504a98d3b tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Reconfigured VM instance instance-00000075 to detach disk 2001 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1987.447787] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-074d1bbf-5157-4feb-bcf4-50a504a98d3b tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1987.448061] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-87b103ec-53b8-4f86-9dfa-1188b86e2cb7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.455227] env[62510]: DEBUG oslo_vmware.api [None req-074d1bbf-5157-4feb-bcf4-50a504a98d3b tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for the task: (returnval){ [ 1987.455227] env[62510]: value = "task-1769709" [ 1987.455227] env[62510]: _type = "Task" [ 1987.455227] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1987.462609] env[62510]: DEBUG oslo_vmware.api [None req-074d1bbf-5157-4feb-bcf4-50a504a98d3b tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769709, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1987.583820] env[62510]: DEBUG nova.compute.manager [None req-a195657e-a0cb-4152-be28-05685344d35b tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1987.584181] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a195657e-a0cb-4152-be28-05685344d35b tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1987.585243] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e884f393-c072-4c2b-bcf8-1e5f30bc0d43 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.594674] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-a195657e-a0cb-4152-be28-05685344d35b tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1987.594975] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4440ceea-5756-4273-8407-1cfc1920862f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.601574] env[62510]: DEBUG oslo_vmware.api [None req-a195657e-a0cb-4152-be28-05685344d35b tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1987.601574] env[62510]: value = "task-1769710" [ 1987.601574] env[62510]: _type = "Task" [ 1987.601574] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1987.610476] env[62510]: DEBUG oslo_vmware.api [None req-a195657e-a0cb-4152-be28-05685344d35b tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769710, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1987.685423] env[62510]: DEBUG nova.compute.utils [None req-22b39d3b-b224-4609-bd8b-8a9da5202f11 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1987.965413] env[62510]: DEBUG oslo_vmware.api [None req-074d1bbf-5157-4feb-bcf4-50a504a98d3b tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769709, 'name': PowerOnVM_Task, 'duration_secs': 0.433284} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1987.965838] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-074d1bbf-5157-4feb-bcf4-50a504a98d3b tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1987.965925] env[62510]: DEBUG nova.compute.manager [None req-074d1bbf-5157-4feb-bcf4-50a504a98d3b tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1987.966731] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-272e96b5-520f-46d2-b113-acf4a995bf57 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.010026] env[62510]: DEBUG nova.compute.manager [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1988.010628] env[62510]: DEBUG nova.virt.hardware [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1988.010877] env[62510]: DEBUG nova.virt.hardware [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1988.011071] env[62510]: DEBUG nova.virt.hardware [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1988.011266] env[62510]: DEBUG nova.virt.hardware [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1988.011416] env[62510]: DEBUG nova.virt.hardware [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1988.011567] env[62510]: DEBUG nova.virt.hardware [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1988.011775] env[62510]: DEBUG nova.virt.hardware [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1988.011934] env[62510]: DEBUG nova.virt.hardware [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1988.012114] env[62510]: DEBUG nova.virt.hardware [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1988.012278] env[62510]: DEBUG nova.virt.hardware [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1988.012452] env[62510]: DEBUG nova.virt.hardware [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1988.013889] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9714350d-2531-4944-9de7-37f15b8303b5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.022184] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94d2a089-ecf9-4298-84c4-fd6dec64b15d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.038126] env[62510]: DEBUG nova.network.neutron [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1988.111383] env[62510]: DEBUG oslo_vmware.api [None req-a195657e-a0cb-4152-be28-05685344d35b tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769710, 'name': PowerOffVM_Task, 'duration_secs': 0.197938} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1988.111631] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-a195657e-a0cb-4152-be28-05685344d35b tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1988.111800] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a195657e-a0cb-4152-be28-05685344d35b tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1988.112049] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9c0957bc-0b09-4bb2-b03d-168211e472f0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.177917] env[62510]: DEBUG nova.network.neutron [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Updating instance_info_cache with network_info: [{"id": "453df25e-58eb-42b3-aa0a-3771b21d6b25", "address": "fa:16:3e:5b:92:84", "network": {"id": "9b209a99-520e-436f-be97-fe37ae505518", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1482163995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86abf24d608d4c438161dc0b8335dea1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap453df25e-58", "ovs_interfaceid": "453df25e-58eb-42b3-aa0a-3771b21d6b25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1988.188407] env[62510]: DEBUG oslo_concurrency.lockutils [None req-22b39d3b-b224-4609-bd8b-8a9da5202f11 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "22002fc1-647e-4e65-a5f0-c3a34575985f" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1988.339340] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.276s {{(pid=62510) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1988.342038] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9fd84bf1-add9-444f-a593-0022cc055b21 tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.829s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1988.342768] env[62510]: DEBUG nova.objects.instance [None req-9fd84bf1-add9-444f-a593-0022cc055b21 tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Lazy-loading 'resources' on Instance uuid bf62d0ce-c0e6-4a77-ab05-ac912ec5530f {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1988.680876] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Releasing lock "refresh_cache-f39e74c3-eb58-4d28-a489-73d2de1e9bef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1988.681307] env[62510]: DEBUG nova.compute.manager [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Instance network_info: |[{"id": "453df25e-58eb-42b3-aa0a-3771b21d6b25", "address": "fa:16:3e:5b:92:84", "network": {"id": "9b209a99-520e-436f-be97-fe37ae505518", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1482163995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86abf24d608d4c438161dc0b8335dea1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap453df25e-58", "ovs_interfaceid": "453df25e-58eb-42b3-aa0a-3771b21d6b25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1988.681665] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5b:92:84', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9630cae2-7dd9-42b7-8b53-91ab254af243', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '453df25e-58eb-42b3-aa0a-3771b21d6b25', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1988.691061] env[62510]: DEBUG oslo.service.loopingcall [None 
req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1988.691061] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1988.691061] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bb14a8b6-7875-45f3-a18e-2012cfb1c56f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.713376] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1988.713376] env[62510]: value = "task-1769712" [ 1988.713376] env[62510]: _type = "Task" [ 1988.713376] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1988.721448] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769712, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1988.790162] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1988.790528] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1988.790838] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Deleting the datastore file [datastore1] 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1988.791531] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5fd22bd1-ee50-4b38-a634-81347fa93f6d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.798753] env[62510]: DEBUG oslo_vmware.api [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 1988.798753] env[62510]: value = "task-1769713" [ 1988.798753] env[62510]: _type = "Task" [ 1988.798753] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1988.807405] env[62510]: DEBUG oslo_vmware.api [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769713, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1988.808601] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a195657e-a0cb-4152-be28-05685344d35b tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1988.808812] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a195657e-a0cb-4152-be28-05685344d35b tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1988.808989] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-a195657e-a0cb-4152-be28-05685344d35b tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Deleting the datastore file [datastore1] 2f7b02e8-f658-448f-b6e6-9bfa94c74da4 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1988.809252] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5daf19ee-0df0-4401-9cac-e215a2f3bc2f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.815565] env[62510]: DEBUG oslo_vmware.api [None req-a195657e-a0cb-4152-be28-05685344d35b tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for the task: (returnval){ [ 1988.815565] env[62510]: value = "task-1769714" [ 1988.815565] env[62510]: _type = "Task" [ 1988.815565] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1988.823429] env[62510]: DEBUG oslo_vmware.api [None req-a195657e-a0cb-4152-be28-05685344d35b tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769714, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1988.900948] env[62510]: INFO nova.scheduler.client.report [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Deleted allocation for migration 42a6e31c-3e78-4d02-aef7-a77d618265e4 [ 1989.033753] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f32c2c0-eaf4-48a3-ac96-34ea1cf52e56 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.041527] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66afbb9d-4a72-4f3e-8f8b-43cf5b03e91c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.073343] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5061ea10-2cf2-40ca-9984-1ecc3f715ee7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.081088] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-005c3a24-5649-4b65-863c-45d03276ed71 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.094538] env[62510]: DEBUG nova.compute.provider_tree [None req-9fd84bf1-add9-444f-a593-0022cc055b21 tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1989.198255] env[62510]: DEBUG nova.compute.manager [req-2fbf98c9-f046-4292-834d-7ef1367accf4 req-2898f680-f588-43d6-ad4f-8e11d55d0d5d service nova] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Received event network-changed-453df25e-58eb-42b3-aa0a-3771b21d6b25 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1989.198449] env[62510]: DEBUG nova.compute.manager [req-2fbf98c9-f046-4292-834d-7ef1367accf4 req-2898f680-f588-43d6-ad4f-8e11d55d0d5d service nova] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Refreshing instance network info cache due to event network-changed-453df25e-58eb-42b3-aa0a-3771b21d6b25. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1989.198676] env[62510]: DEBUG oslo_concurrency.lockutils [req-2fbf98c9-f046-4292-834d-7ef1367accf4 req-2898f680-f588-43d6-ad4f-8e11d55d0d5d service nova] Acquiring lock "refresh_cache-f39e74c3-eb58-4d28-a489-73d2de1e9bef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1989.198820] env[62510]: DEBUG oslo_concurrency.lockutils [req-2fbf98c9-f046-4292-834d-7ef1367accf4 req-2898f680-f588-43d6-ad4f-8e11d55d0d5d service nova] Acquired lock "refresh_cache-f39e74c3-eb58-4d28-a489-73d2de1e9bef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1989.198981] env[62510]: DEBUG nova.network.neutron [req-2fbf98c9-f046-4292-834d-7ef1367accf4 req-2898f680-f588-43d6-ad4f-8e11d55d0d5d service nova] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Refreshing network info cache for port 453df25e-58eb-42b3-aa0a-3771b21d6b25 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1989.224067] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769712, 'name': CreateVM_Task, 'duration_secs': 0.397654} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1989.224226] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1989.224890] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367499', 'volume_id': '5f4e1cdd-b5a6-4d73-9faf-1b45c89d6de8', 'name': 'volume-5f4e1cdd-b5a6-4d73-9faf-1b45c89d6de8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f39e74c3-eb58-4d28-a489-73d2de1e9bef', 'attached_at': '', 'detached_at': '', 'volume_id': '5f4e1cdd-b5a6-4d73-9faf-1b45c89d6de8', 'serial': '5f4e1cdd-b5a6-4d73-9faf-1b45c89d6de8'}, 'attachment_id': '34979739-3502-4363-9703-fdf4a76549b8', 'mount_device': '/dev/sda', 'device_type': None, 'disk_bus': None, 'guest_format': None, 'boot_index': 0, 'delete_on_termination': True, 'volume_type': None}], 'swap': None} {{(pid=62510) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1989.225104] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Root volume attach. 
Driver type: vmdk {{(pid=62510) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1989.225884] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f498515-81b5-4497-9fdc-ae8e84755ab5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.233206] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-791d9c12-d295-45b8-b5a3-c59959c3449c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.239348] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5d48c34-5965-4f45-9085-bbb01173f360 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.245455] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-3055fb0b-354a-4ece-8872-8a8b7130c968 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.251986] env[62510]: DEBUG oslo_concurrency.lockutils [None req-22b39d3b-b224-4609-bd8b-8a9da5202f11 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "22002fc1-647e-4e65-a5f0-c3a34575985f" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1989.252236] env[62510]: DEBUG oslo_concurrency.lockutils [None req-22b39d3b-b224-4609-bd8b-8a9da5202f11 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "22002fc1-647e-4e65-a5f0-c3a34575985f" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1989.252455] env[62510]: INFO nova.compute.manager [None req-22b39d3b-b224-4609-bd8b-8a9da5202f11 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Attaching volume adb312a1-8d6e-418b-86ab-664579515ac4 to /dev/sdb [ 1989.254197] env[62510]: DEBUG oslo_vmware.api [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1989.254197] env[62510]: value = "task-1769715" [ 1989.254197] env[62510]: _type = "Task" [ 1989.254197] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1989.271268] env[62510]: DEBUG oslo_vmware.api [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769715, 'name': RelocateVM_Task} progress is 7%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.289167] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b94fed7d-7a52-46e1-9bc1-7a1cc3299b63 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.296190] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-448869f2-03d8-4b2c-b9db-3e5acfecbe68 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.309045] env[62510]: DEBUG oslo_vmware.api [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769713, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.16177} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1989.311330] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1989.311524] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1989.311706] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1989.311879] env[62510]: INFO nova.compute.manager [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Took 2.59 seconds to destroy the instance on the hypervisor. [ 1989.312133] env[62510]: DEBUG oslo.service.loopingcall [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1989.312409] env[62510]: DEBUG nova.virt.block_device [None req-22b39d3b-b224-4609-bd8b-8a9da5202f11 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Updating existing volume attachment record: 22beffb0-a3a3-4661-83cd-1d7915957a17 {{(pid=62510) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1989.314467] env[62510]: DEBUG nova.compute.manager [-] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1989.314567] env[62510]: DEBUG nova.network.neutron [-] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1989.323511] env[62510]: DEBUG oslo_vmware.api [None req-a195657e-a0cb-4152-be28-05685344d35b tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Task: {'id': task-1769714, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.172581} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1989.323745] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-a195657e-a0cb-4152-be28-05685344d35b tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1989.323924] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a195657e-a0cb-4152-be28-05685344d35b tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1989.324115] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a195657e-a0cb-4152-be28-05685344d35b tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1989.324290] env[62510]: INFO nova.compute.manager [None req-a195657e-a0cb-4152-be28-05685344d35b tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Took 1.74 seconds to destroy the instance on the hypervisor. [ 1989.324516] env[62510]: DEBUG oslo.service.loopingcall [None req-a195657e-a0cb-4152-be28-05685344d35b tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1989.324696] env[62510]: DEBUG nova.compute.manager [-] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1989.324789] env[62510]: DEBUG nova.network.neutron [-] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1989.408140] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "14a54dac-d2b8-4618-86c8-ab2d08bae005" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 8.611s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1989.597641] env[62510]: DEBUG nova.scheduler.client.report [None req-9fd84bf1-add9-444f-a593-0022cc055b21 tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1989.648573] env[62510]: DEBUG nova.compute.manager [req-5bc42051-45ea-4164-a759-aab0c649d277 req-22f1cf07-5d70-4a3c-a6d8-8c3a2a3d10e6 service nova] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Received event network-vif-deleted-d3047f95-b766-4344-bc0c-ad2f1b9f55fd {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1989.648778] env[62510]: INFO nova.compute.manager [req-5bc42051-45ea-4164-a759-aab0c649d277 req-22f1cf07-5d70-4a3c-a6d8-8c3a2a3d10e6 service nova] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Neutron deleted interface d3047f95-b766-4344-bc0c-ad2f1b9f55fd; detaching it from the instance and deleting it from the info cache [ 1989.648955] env[62510]: DEBUG nova.network.neutron [req-5bc42051-45ea-4164-a759-aab0c649d277 req-22f1cf07-5d70-4a3c-a6d8-8c3a2a3d10e6 service nova] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1989.764591] env[62510]: DEBUG oslo_concurrency.lockutils [None req-213080b1-5137-4cd7-9808-5142f64739a7 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Acquiring lock "abf15987-86cc-4fdc-be9a-efd0448ce9ca" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1989.764859] env[62510]: DEBUG oslo_concurrency.lockutils [None req-213080b1-5137-4cd7-9808-5142f64739a7 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Lock "abf15987-86cc-4fdc-be9a-efd0448ce9ca" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1989.765065] env[62510]: DEBUG oslo_concurrency.lockutils [None req-213080b1-5137-4cd7-9808-5142f64739a7 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Acquiring lock "abf15987-86cc-4fdc-be9a-efd0448ce9ca-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1989.765246] env[62510]: DEBUG oslo_concurrency.lockutils [None req-213080b1-5137-4cd7-9808-5142f64739a7 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Lock "abf15987-86cc-4fdc-be9a-efd0448ce9ca-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1989.765418] env[62510]: DEBUG oslo_concurrency.lockutils [None req-213080b1-5137-4cd7-9808-5142f64739a7 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Lock "abf15987-86cc-4fdc-be9a-efd0448ce9ca-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1989.771934] env[62510]: DEBUG oslo_vmware.api [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769715, 'name': RelocateVM_Task, 'duration_secs': 0.025273} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1989.772435] env[62510]: INFO nova.compute.manager [None req-213080b1-5137-4cd7-9808-5142f64739a7 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Terminating instance [ 1989.773770] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Volume attach. 
Driver type: vmdk {{(pid=62510) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1989.774024] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367499', 'volume_id': '5f4e1cdd-b5a6-4d73-9faf-1b45c89d6de8', 'name': 'volume-5f4e1cdd-b5a6-4d73-9faf-1b45c89d6de8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f39e74c3-eb58-4d28-a489-73d2de1e9bef', 'attached_at': '', 'detached_at': '', 'volume_id': '5f4e1cdd-b5a6-4d73-9faf-1b45c89d6de8', 'serial': '5f4e1cdd-b5a6-4d73-9faf-1b45c89d6de8'} {{(pid=62510) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1989.776039] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28661a2a-a864-492a-a4d9-3dbba6096bb8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.796077] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab01816d-3705-4546-b420-296a6ac4487a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.820164] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] volume-5f4e1cdd-b5a6-4d73-9faf-1b45c89d6de8/volume-5f4e1cdd-b5a6-4d73-9faf-1b45c89d6de8.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1989.820738] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-670c3bdc-fc2e-4913-8182-bf2e3ab3c059 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.839303] env[62510]: DEBUG oslo_vmware.api [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1989.839303] env[62510]: value = "task-1769718" [ 1989.839303] env[62510]: _type = "Task" [ 1989.839303] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1989.846980] env[62510]: DEBUG oslo_vmware.api [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769718, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.965479] env[62510]: DEBUG nova.network.neutron [req-2fbf98c9-f046-4292-834d-7ef1367accf4 req-2898f680-f588-43d6-ad4f-8e11d55d0d5d service nova] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Updated VIF entry in instance network info cache for port 453df25e-58eb-42b3-aa0a-3771b21d6b25. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1989.965862] env[62510]: DEBUG nova.network.neutron [req-2fbf98c9-f046-4292-834d-7ef1367accf4 req-2898f680-f588-43d6-ad4f-8e11d55d0d5d service nova] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Updating instance_info_cache with network_info: [{"id": "453df25e-58eb-42b3-aa0a-3771b21d6b25", "address": "fa:16:3e:5b:92:84", "network": {"id": "9b209a99-520e-436f-be97-fe37ae505518", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1482163995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86abf24d608d4c438161dc0b8335dea1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap453df25e-58", "ovs_interfaceid": "453df25e-58eb-42b3-aa0a-3771b21d6b25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1990.081288] env[62510]: DEBUG nova.network.neutron [-] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1990.102915] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9fd84bf1-add9-444f-a593-0022cc055b21 tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.760s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1990.124806] env[62510]: INFO nova.scheduler.client.report [None req-9fd84bf1-add9-444f-a593-0022cc055b21 tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Deleted allocations for instance bf62d0ce-c0e6-4a77-ab05-ac912ec5530f [ 1990.152813] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fb1a606c-4861-48cd-8873-82a0df501185 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.163182] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15f5dc07-88d0-400a-b6a5-c5a4e82595c0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.197681] env[62510]: DEBUG nova.compute.manager [req-5bc42051-45ea-4164-a759-aab0c649d277 req-22f1cf07-5d70-4a3c-a6d8-8c3a2a3d10e6 service nova] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Detach interface failed, port_id=d3047f95-b766-4344-bc0c-ad2f1b9f55fd, reason: Instance 2f7b02e8-f658-448f-b6e6-9bfa94c74da4 could not be found. 
{{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1990.275758] env[62510]: DEBUG nova.network.neutron [-] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1990.280884] env[62510]: DEBUG nova.compute.manager [None req-213080b1-5137-4cd7-9808-5142f64739a7 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1990.280884] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-213080b1-5137-4cd7-9808-5142f64739a7 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1990.281662] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13aa1732-dba1-4b57-a775-fcc3c352b92f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.291513] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-213080b1-5137-4cd7-9808-5142f64739a7 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1990.291513] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7be20360-2037-4814-9fcb-e68720c0219c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.298160] env[62510]: DEBUG oslo_vmware.api [None req-213080b1-5137-4cd7-9808-5142f64739a7 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for the task: (returnval){ [ 1990.298160] env[62510]: value = "task-1769720" [ 1990.298160] env[62510]: _type = "Task" [ 1990.298160] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1990.307860] env[62510]: DEBUG oslo_vmware.api [None req-213080b1-5137-4cd7-9808-5142f64739a7 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769720, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1990.349248] env[62510]: DEBUG oslo_vmware.api [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769718, 'name': ReconfigVM_Task, 'duration_secs': 0.372138} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1990.349565] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Reconfigured VM instance instance-00000077 to attach disk [datastore1] volume-5f4e1cdd-b5a6-4d73-9faf-1b45c89d6de8/volume-5f4e1cdd-b5a6-4d73-9faf-1b45c89d6de8.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1990.354697] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0e945417-83a2-45ee-8c46-ee77bc31e4e2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.374445] env[62510]: DEBUG oslo_vmware.api [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1990.374445] env[62510]: value = "task-1769721" [ 1990.374445] env[62510]: _type = "Task" [ 1990.374445] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1990.381404] env[62510]: DEBUG oslo_vmware.api [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769721, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1990.469355] env[62510]: DEBUG oslo_concurrency.lockutils [req-2fbf98c9-f046-4292-834d-7ef1367accf4 req-2898f680-f588-43d6-ad4f-8e11d55d0d5d service nova] Releasing lock "refresh_cache-f39e74c3-eb58-4d28-a489-73d2de1e9bef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1990.586407] env[62510]: INFO nova.compute.manager [-] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Took 1.26 seconds to deallocate network for instance. [ 1990.632738] env[62510]: DEBUG oslo_concurrency.lockutils [None req-9fd84bf1-add9-444f-a593-0022cc055b21 tempest-ServerMetadataTestJSON-2143241213 tempest-ServerMetadataTestJSON-2143241213-project-member] Lock "bf62d0ce-c0e6-4a77-ab05-ac912ec5530f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.623s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1990.778858] env[62510]: INFO nova.compute.manager [-] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Took 1.46 seconds to deallocate network for instance. [ 1990.809754] env[62510]: DEBUG oslo_vmware.api [None req-213080b1-5137-4cd7-9808-5142f64739a7 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769720, 'name': PowerOffVM_Task, 'duration_secs': 0.183967} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1990.810011] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-213080b1-5137-4cd7-9808-5142f64739a7 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1990.810225] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-213080b1-5137-4cd7-9808-5142f64739a7 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1990.810466] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-55d37dcc-a04c-4314-ab0f-c793a14af628 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.825734] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "14a54dac-d2b8-4618-86c8-ab2d08bae005" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1990.825977] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "14a54dac-d2b8-4618-86c8-ab2d08bae005" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1990.826196] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "14a54dac-d2b8-4618-86c8-ab2d08bae005-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1990.826419] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "14a54dac-d2b8-4618-86c8-ab2d08bae005-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1990.826596] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "14a54dac-d2b8-4618-86c8-ab2d08bae005-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1990.828721] env[62510]: INFO nova.compute.manager [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 
tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Terminating instance [ 1990.883016] env[62510]: DEBUG oslo_vmware.api [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769721, 'name': ReconfigVM_Task, 'duration_secs': 0.162995} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1990.883325] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367499', 'volume_id': '5f4e1cdd-b5a6-4d73-9faf-1b45c89d6de8', 'name': 'volume-5f4e1cdd-b5a6-4d73-9faf-1b45c89d6de8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f39e74c3-eb58-4d28-a489-73d2de1e9bef', 'attached_at': '', 'detached_at': '', 'volume_id': '5f4e1cdd-b5a6-4d73-9faf-1b45c89d6de8', 'serial': '5f4e1cdd-b5a6-4d73-9faf-1b45c89d6de8'} {{(pid=62510) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1990.883881] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1b348bb9-9944-4c46-9e47-5388d3dfcacb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.890389] env[62510]: DEBUG oslo_vmware.api [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1990.890389] env[62510]: value = "task-1769723" [ 1990.890389] env[62510]: _type = "Task" [ 1990.890389] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1990.898500] env[62510]: DEBUG oslo_vmware.api [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769723, 'name': Rename_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1990.939037] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-213080b1-5137-4cd7-9808-5142f64739a7 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1990.939262] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-213080b1-5137-4cd7-9808-5142f64739a7 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1990.939453] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-213080b1-5137-4cd7-9808-5142f64739a7 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Deleting the datastore file [datastore1] abf15987-86cc-4fdc-be9a-efd0448ce9ca {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1990.939753] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-36d232d3-14fa-40a9-9954-5e381a02aace {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.946438] env[62510]: DEBUG oslo_vmware.api [None req-213080b1-5137-4cd7-9808-5142f64739a7 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for the task: (returnval){ [ 1990.946438] env[62510]: value = "task-1769724" [ 1990.946438] env[62510]: _type = "Task" [ 1990.946438] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1990.954616] env[62510]: DEBUG oslo_vmware.api [None req-213080b1-5137-4cd7-9808-5142f64739a7 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769724, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1991.095331] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a195657e-a0cb-4152-be28-05685344d35b tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1991.095331] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a195657e-a0cb-4152-be28-05685344d35b tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1991.095331] env[62510]: DEBUG nova.objects.instance [None req-a195657e-a0cb-4152-be28-05685344d35b tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lazy-loading 'resources' on Instance uuid 2f7b02e8-f658-448f-b6e6-9bfa94c74da4 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1991.225638] env[62510]: DEBUG nova.compute.manager [req-21d5472c-282b-4885-a058-d0160f738ef6 req-f8537dea-19f7-45cb-803d-89bb3c94d329 service nova] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Received event network-vif-deleted-f5b21632-114e-43ff-8c8e-a6ff44e674eb {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1991.285598] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1991.334185] env[62510]: DEBUG nova.compute.manager [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1991.334185] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1991.334678] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55e92845-ac86-4741-96e5-6acd0ac59fc9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.342860] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1991.343107] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a38d8bd3-1e04-4410-b53a-fadc0bd3b560 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.350709] env[62510]: DEBUG oslo_vmware.api [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1991.350709] env[62510]: value = "task-1769725" [ 1991.350709] env[62510]: _type = "Task" [ 1991.350709] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1991.359780] env[62510]: DEBUG oslo_vmware.api [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769725, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1991.400154] env[62510]: DEBUG oslo_vmware.api [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769723, 'name': Rename_Task, 'duration_secs': 0.146935} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1991.400433] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1991.400734] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-89b940ad-1847-4239-8c95-cc89ead81546 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.407974] env[62510]: DEBUG oslo_vmware.api [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 1991.407974] env[62510]: value = "task-1769726" [ 1991.407974] env[62510]: _type = "Task" [ 1991.407974] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1991.418113] env[62510]: DEBUG oslo_vmware.api [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769726, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1991.461898] env[62510]: DEBUG oslo_vmware.api [None req-213080b1-5137-4cd7-9808-5142f64739a7 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769724, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.216515} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1991.462566] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-213080b1-5137-4cd7-9808-5142f64739a7 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1991.462566] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-213080b1-5137-4cd7-9808-5142f64739a7 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1991.462683] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-213080b1-5137-4cd7-9808-5142f64739a7 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1991.462811] env[62510]: INFO nova.compute.manager [None req-213080b1-5137-4cd7-9808-5142f64739a7 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1991.463076] env[62510]: DEBUG oslo.service.loopingcall [None req-213080b1-5137-4cd7-9808-5142f64739a7 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1991.463276] env[62510]: DEBUG nova.compute.manager [-] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1991.463376] env[62510]: DEBUG nova.network.neutron [-] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1991.767070] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8808a106-c53d-46f8-bf16-59e5c057f8b7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.775286] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85afbd93-b7be-4431-b5a1-5ea2cfbe0244 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.806692] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3ec9ece-6952-4576-b8f0-7f553230889e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.814221] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15ea4993-4132-48f5-a953-120d209f3f3c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.827672] env[62510]: DEBUG nova.compute.provider_tree [None req-a195657e-a0cb-4152-be28-05685344d35b tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1991.862139] env[62510]: DEBUG oslo_vmware.api [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769725, 'name': PowerOffVM_Task, 'duration_secs': 0.221741} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1991.862423] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1991.862634] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1991.864954] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5702d3a2-ee31-4c1d-8390-159be0bd3b0e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.878933] env[62510]: DEBUG nova.compute.manager [req-512a61b6-8280-4bd8-9b0c-3ded04a1d9d0 req-24c1395c-ee4d-4baa-931f-16ef3db159de service nova] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Received event network-vif-deleted-f4bf4471-4bf0-485f-80a8-2548fbf3e100 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1991.879232] env[62510]: INFO nova.compute.manager [req-512a61b6-8280-4bd8-9b0c-3ded04a1d9d0 req-24c1395c-ee4d-4baa-931f-16ef3db159de service nova] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Neutron deleted interface f4bf4471-4bf0-485f-80a8-2548fbf3e100; detaching it from the instance and deleting it from the info cache [ 1991.879318] env[62510]: DEBUG nova.network.neutron [req-512a61b6-8280-4bd8-9b0c-3ded04a1d9d0 req-24c1395c-ee4d-4baa-931f-16ef3db159de service nova] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1991.918604] env[62510]: DEBUG oslo_vmware.api [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769726, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1991.940091] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1991.940091] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1991.940091] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Deleting the datastore file [datastore1] 14a54dac-d2b8-4618-86c8-ab2d08bae005 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1991.940091] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5a0c2b7e-3b87-43f1-a675-f0e7d896e2a7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.947234] env[62510]: DEBUG oslo_vmware.api [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for the task: (returnval){ [ 1991.947234] env[62510]: value = "task-1769729" [ 1991.947234] env[62510]: _type = "Task" [ 1991.947234] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1991.955845] env[62510]: DEBUG oslo_vmware.api [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769729, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1992.332278] env[62510]: DEBUG nova.scheduler.client.report [None req-a195657e-a0cb-4152-be28-05685344d35b tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1992.341075] env[62510]: DEBUG nova.network.neutron [-] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1992.385029] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-174690a4-e709-4977-9eb7-733a3445895c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.395296] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ded8e536-3d48-4b1c-9250-636bd7654424 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.419476] env[62510]: DEBUG oslo_vmware.api [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769726, 'name': PowerOnVM_Task, 'duration_secs': 0.680382} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1992.419784] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1992.420014] env[62510]: INFO nova.compute.manager [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Took 4.41 seconds to spawn the instance on the hypervisor. 
[ 1992.420318] env[62510]: DEBUG nova.compute.manager [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1992.421380] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdda5d5f-8b21-49cd-ab63-c7ac45b2872e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.433444] env[62510]: DEBUG nova.compute.manager [req-512a61b6-8280-4bd8-9b0c-3ded04a1d9d0 req-24c1395c-ee4d-4baa-931f-16ef3db159de service nova] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Detach interface failed, port_id=f4bf4471-4bf0-485f-80a8-2548fbf3e100, reason: Instance abf15987-86cc-4fdc-be9a-efd0448ce9ca could not be found. {{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 1992.456528] env[62510]: DEBUG oslo_vmware.api [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Task: {'id': task-1769729, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.387272} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1992.456809] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1992.456999] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1992.457205] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1992.457411] env[62510]: INFO nova.compute.manager [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1992.457642] env[62510]: DEBUG oslo.service.loopingcall [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1992.457836] env[62510]: DEBUG nova.compute.manager [-] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1992.457937] env[62510]: DEBUG nova.network.neutron [-] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1992.836386] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a195657e-a0cb-4152-be28-05685344d35b tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.742s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1992.838764] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.553s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1992.839082] env[62510]: DEBUG nova.objects.instance [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lazy-loading 'resources' on Instance uuid 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1992.842981] env[62510]: INFO nova.compute.manager [-] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Took 1.38 seconds to deallocate network for instance. [ 1992.870567] env[62510]: INFO nova.scheduler.client.report [None req-a195657e-a0cb-4152-be28-05685344d35b tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Deleted allocations for instance 2f7b02e8-f658-448f-b6e6-9bfa94c74da4 [ 1992.951946] env[62510]: INFO nova.compute.manager [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Took 14.52 seconds to build instance. 
[ 1993.202424] env[62510]: DEBUG nova.network.neutron [-] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1993.349392] env[62510]: DEBUG oslo_concurrency.lockutils [None req-213080b1-5137-4cd7-9808-5142f64739a7 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1993.382172] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a195657e-a0cb-4152-be28-05685344d35b tempest-ServerRescueNegativeTestJSON-1229876436 tempest-ServerRescueNegativeTestJSON-1229876436-project-member] Lock "2f7b02e8-f658-448f-b6e6-9bfa94c74da4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.305s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1993.454406] env[62510]: DEBUG oslo_concurrency.lockutils [None req-1b4c3d66-761f-4fb7-b536-a2bde30dfd80 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "f39e74c3-eb58-4d28-a489-73d2de1e9bef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.034s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1993.506172] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7724c7a-4911-4066-92e1-9b5992d44ab9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.514659] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93037f37-86fc-49a0-aaac-ef95c58e701c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.546951] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-263033f0-8255-4ae1-9e50-2712c5850319 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.554899] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a21db4b7-1a88-4653-b9c1-288c255f15c9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.568028] env[62510]: DEBUG nova.compute.provider_tree [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1993.651796] env[62510]: DEBUG oslo_service.periodic_task [None 
req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1993.652201] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1993.706420] env[62510]: INFO nova.compute.manager [-] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Took 1.25 seconds to deallocate network for instance. [ 1993.748781] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d9f4dfb1-ca23-4c45-b0ad-1f00df049a56 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "9956e5d2-edda-47af-a3df-743ebed1154b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1993.749057] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d9f4dfb1-ca23-4c45-b0ad-1f00df049a56 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "9956e5d2-edda-47af-a3df-743ebed1154b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1993.749285] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d9f4dfb1-ca23-4c45-b0ad-1f00df049a56 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "9956e5d2-edda-47af-a3df-743ebed1154b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1993.749485] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d9f4dfb1-ca23-4c45-b0ad-1f00df049a56 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "9956e5d2-edda-47af-a3df-743ebed1154b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1993.749670] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d9f4dfb1-ca23-4c45-b0ad-1f00df049a56 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "9956e5d2-edda-47af-a3df-743ebed1154b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1993.752062] env[62510]: INFO nova.compute.manager [None req-d9f4dfb1-ca23-4c45-b0ad-1f00df049a56 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Terminating instance [ 1993.909672] env[62510]: DEBUG nova.compute.manager [req-bf8becb2-f019-493f-a0ff-485cabc5b809 req-4d0079a2-29dc-4ce4-8ff1-586cf156791c service nova] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Received event network-vif-deleted-9015bc32-b9ad-4846-a019-0a10e61e5218 {{(pid=62510) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1994.095802] env[62510]: ERROR nova.scheduler.client.report [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [req-351aaa43-22f7-4356-a0d4-e634ccfc869b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c3653102-341b-4ed1-8b1f-1abaf8aa3e56. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-351aaa43-22f7-4356-a0d4-e634ccfc869b"}]} [ 1994.111693] env[62510]: DEBUG nova.scheduler.client.report [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Refreshing inventories for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1994.125779] env[62510]: DEBUG nova.scheduler.client.report [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Updating ProviderTree inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1994.125990] env[62510]: DEBUG nova.compute.provider_tree [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1994.136693] env[62510]: DEBUG nova.scheduler.client.report [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Refreshing aggregate associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, aggregates: None {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1994.155389] env[62510]: DEBUG nova.scheduler.client.report [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 
tempest-AttachVolumeNegativeTest-146397362-project-member] Refreshing trait associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1994.167733] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1994.167733] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Starting heal instance info cache {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 1994.214027] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1994.256180] env[62510]: DEBUG nova.compute.manager [None req-d9f4dfb1-ca23-4c45-b0ad-1f00df049a56 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1994.256824] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d9f4dfb1-ca23-4c45-b0ad-1f00df049a56 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1994.264124] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ced6f9e-c745-444c-870f-68069eeb08a7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.275166] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9f4dfb1-ca23-4c45-b0ad-1f00df049a56 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1994.275166] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9052355d-d68b-4e18-aec3-141574e672e9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.282788] env[62510]: DEBUG oslo_vmware.api [None req-d9f4dfb1-ca23-4c45-b0ad-1f00df049a56 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 1994.282788] env[62510]: value = "task-1769730" [ 1994.282788] env[62510]: _type = "Task" [ 1994.282788] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1994.294428] env[62510]: DEBUG oslo_vmware.api [None req-d9f4dfb1-ca23-4c45-b0ad-1f00df049a56 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769730, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1994.357727] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-22b39d3b-b224-4609-bd8b-8a9da5202f11 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Volume attach. Driver type: vmdk {{(pid=62510) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1994.357951] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-22b39d3b-b224-4609-bd8b-8a9da5202f11 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367507', 'volume_id': 'adb312a1-8d6e-418b-86ab-664579515ac4', 'name': 'volume-adb312a1-8d6e-418b-86ab-664579515ac4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '22002fc1-647e-4e65-a5f0-c3a34575985f', 'attached_at': '', 'detached_at': '', 'volume_id': 'adb312a1-8d6e-418b-86ab-664579515ac4', 'serial': 'adb312a1-8d6e-418b-86ab-664579515ac4'} {{(pid=62510) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1994.359032] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-135f2220-1e70-411b-9a0b-143298e3c6b5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.379286] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef2eb140-4842-4e68-b325-b6998745e8d9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.408537] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-22b39d3b-b224-4609-bd8b-8a9da5202f11 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] volume-adb312a1-8d6e-418b-86ab-664579515ac4/volume-adb312a1-8d6e-418b-86ab-664579515ac4.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1994.411551] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9d66557a-f5e9-4710-8fd9-dea5ac0734d3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.426549] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9acc7166-fc4a-457a-827c-8d92d26d08bd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.436056] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1b93920-7e8c-4c98-b65c-0d16784f7169 {{(pid=62510) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.439425] env[62510]: DEBUG oslo_vmware.api [None req-22b39d3b-b224-4609-bd8b-8a9da5202f11 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 1994.439425] env[62510]: value = "task-1769731" [ 1994.439425] env[62510]: _type = "Task" [ 1994.439425] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1994.468467] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14dad498-c513-4945-aa7b-9462ee760a50 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.474465] env[62510]: DEBUG oslo_vmware.api [None req-22b39d3b-b224-4609-bd8b-8a9da5202f11 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769731, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1994.479834] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ca45aac-02bf-44b7-ae95-75181f5a5815 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.495119] env[62510]: DEBUG nova.compute.provider_tree [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1994.711161] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "refresh_cache-4e735bb6-f167-4c2b-b44e-d2dd3040603d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1994.711330] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquired lock "refresh_cache-4e735bb6-f167-4c2b-b44e-d2dd3040603d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1994.711505] env[62510]: DEBUG nova.network.neutron [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Forcefully refreshing network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1994.799813] env[62510]: DEBUG oslo_vmware.api [None req-d9f4dfb1-ca23-4c45-b0ad-1f00df049a56 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769730, 'name': PowerOffVM_Task, 'duration_secs': 0.199086} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1994.799813] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9f4dfb1-ca23-4c45-b0ad-1f00df049a56 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1994.799813] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d9f4dfb1-ca23-4c45-b0ad-1f00df049a56 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1994.799813] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f8da3661-f989-45ff-815f-fe82945f32be {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.895270] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d9f4dfb1-ca23-4c45-b0ad-1f00df049a56 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1994.895270] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d9f4dfb1-ca23-4c45-b0ad-1f00df049a56 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1994.895270] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9f4dfb1-ca23-4c45-b0ad-1f00df049a56 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Deleting the datastore file [datastore1] 9956e5d2-edda-47af-a3df-743ebed1154b {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1994.895485] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4ce4de06-084f-4dc4-8f8c-8ce474033328 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.903931] env[62510]: DEBUG oslo_vmware.api [None req-d9f4dfb1-ca23-4c45-b0ad-1f00df049a56 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 1994.903931] env[62510]: value = "task-1769733" [ 1994.903931] env[62510]: _type = "Task" [ 1994.903931] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1994.912138] env[62510]: DEBUG oslo_vmware.api [None req-d9f4dfb1-ca23-4c45-b0ad-1f00df049a56 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769733, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1994.959985] env[62510]: DEBUG oslo_vmware.api [None req-22b39d3b-b224-4609-bd8b-8a9da5202f11 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769731, 'name': ReconfigVM_Task, 'duration_secs': 0.372879} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1994.960939] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-22b39d3b-b224-4609-bd8b-8a9da5202f11 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Reconfigured VM instance instance-00000072 to attach disk [datastore1] volume-adb312a1-8d6e-418b-86ab-664579515ac4/volume-adb312a1-8d6e-418b-86ab-664579515ac4.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1994.970288] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c7730404-ea73-4c9d-987f-3f2bfd82cd95 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.997054] env[62510]: DEBUG oslo_vmware.api [None req-22b39d3b-b224-4609-bd8b-8a9da5202f11 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 1994.997054] env[62510]: value = "task-1769734" [ 1994.997054] env[62510]: _type = "Task" [ 1994.997054] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1995.008998] env[62510]: DEBUG oslo_vmware.api [None req-22b39d3b-b224-4609-bd8b-8a9da5202f11 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769734, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1995.038683] env[62510]: DEBUG nova.scheduler.client.report [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Updated inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with generation 166 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1995.039025] env[62510]: DEBUG nova.compute.provider_tree [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Updating resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 generation from 166 to 167 during operation: update_inventory {{(pid=62510) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1995.039259] env[62510]: DEBUG nova.compute.provider_tree [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1995.414233] env[62510]: DEBUG oslo_vmware.api [None req-d9f4dfb1-ca23-4c45-b0ad-1f00df049a56 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769733, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.181912} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1995.414523] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9f4dfb1-ca23-4c45-b0ad-1f00df049a56 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1995.414676] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d9f4dfb1-ca23-4c45-b0ad-1f00df049a56 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1995.414862] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-d9f4dfb1-ca23-4c45-b0ad-1f00df049a56 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1995.415081] env[62510]: INFO nova.compute.manager [None req-d9f4dfb1-ca23-4c45-b0ad-1f00df049a56 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1995.415334] env[62510]: DEBUG oslo.service.loopingcall [None req-d9f4dfb1-ca23-4c45-b0ad-1f00df049a56 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1995.415531] env[62510]: DEBUG nova.compute.manager [-] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1995.415631] env[62510]: DEBUG nova.network.neutron [-] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1995.508921] env[62510]: DEBUG oslo_vmware.api [None req-22b39d3b-b224-4609-bd8b-8a9da5202f11 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769734, 'name': ReconfigVM_Task, 'duration_secs': 0.1649} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1995.508921] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-22b39d3b-b224-4609-bd8b-8a9da5202f11 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367507', 'volume_id': 'adb312a1-8d6e-418b-86ab-664579515ac4', 'name': 'volume-adb312a1-8d6e-418b-86ab-664579515ac4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '22002fc1-647e-4e65-a5f0-c3a34575985f', 'attached_at': '', 'detached_at': '', 'volume_id': 'adb312a1-8d6e-418b-86ab-664579515ac4', 'serial': 'adb312a1-8d6e-418b-86ab-664579515ac4'} {{(pid=62510) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1995.546338] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.705s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1995.546625] env[62510]: DEBUG oslo_concurrency.lockutils [None req-213080b1-5137-4cd7-9808-5142f64739a7 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.197s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1995.546832] env[62510]: DEBUG nova.objects.instance [None req-213080b1-5137-4cd7-9808-5142f64739a7 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Lazy-loading 'resources' on Instance uuid abf15987-86cc-4fdc-be9a-efd0448ce9ca {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1995.563161] env[62510]: INFO nova.scheduler.client.report [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Deleted allocations for instance 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3 [ 1995.955220] env[62510]: DEBUG nova.compute.manager [req-0e9a4b3e-b2f1-4ba0-b1af-573f85a82782 req-33f9c8b7-1073-4f23-b935-aa7d22b093a0 service nova] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Received event network-changed-a6e31bab-0459-42fe-8756-d37cc3fa3e88 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1995.955220] env[62510]: DEBUG nova.compute.manager [req-0e9a4b3e-b2f1-4ba0-b1af-573f85a82782 req-33f9c8b7-1073-4f23-b935-aa7d22b093a0 service nova] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Refreshing instance network info cache due to event network-changed-a6e31bab-0459-42fe-8756-d37cc3fa3e88. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1995.955220] env[62510]: DEBUG oslo_concurrency.lockutils [req-0e9a4b3e-b2f1-4ba0-b1af-573f85a82782 req-33f9c8b7-1073-4f23-b935-aa7d22b093a0 service nova] Acquiring lock "refresh_cache-4e735bb6-f167-4c2b-b44e-d2dd3040603d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1996.066402] env[62510]: DEBUG nova.network.neutron [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Updating instance_info_cache with network_info: [{"id": "a6e31bab-0459-42fe-8756-d37cc3fa3e88", "address": "fa:16:3e:7d:cb:3f", "network": {"id": "9b209a99-520e-436f-be97-fe37ae505518", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1482163995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86abf24d608d4c438161dc0b8335dea1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6e31bab-04", "ovs_interfaceid": "a6e31bab-0459-42fe-8756-d37cc3fa3e88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1996.073989] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e8f0f9bd-f7c2-456b-9542-d3bd91d46ccf tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.864s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1996.221433] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d79276c-9663-4de5-bb73-2bfad0bf6d6b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.232958] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28875596-cbb9-4240-b250-072f0557da43 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.261752] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9297412-d208-47f2-8fad-4edd6efb3627 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.269173] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a32a5d45-19e8-4b3b-b56d-e3db9fd8c7a1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.286389] env[62510]: DEBUG nova.compute.provider_tree [None 
req-213080b1-5137-4cd7-9808-5142f64739a7 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1996.287833] env[62510]: DEBUG nova.compute.manager [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Stashing vm_state: active {{(pid=62510) _prep_resize /opt/stack/nova/nova/compute/manager.py:5998}} [ 1996.433853] env[62510]: DEBUG nova.network.neutron [-] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1996.566954] env[62510]: DEBUG nova.objects.instance [None req-22b39d3b-b224-4609-bd8b-8a9da5202f11 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lazy-loading 'flavor' on Instance uuid 22002fc1-647e-4e65-a5f0-c3a34575985f {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1996.570816] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Releasing lock "refresh_cache-4e735bb6-f167-4c2b-b44e-d2dd3040603d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1996.570998] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Updated the network info_cache for instance {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10380}} [ 1996.571445] env[62510]: DEBUG oslo_concurrency.lockutils [req-0e9a4b3e-b2f1-4ba0-b1af-573f85a82782 req-33f9c8b7-1073-4f23-b935-aa7d22b093a0 service nova] Acquired lock "refresh_cache-4e735bb6-f167-4c2b-b44e-d2dd3040603d" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1996.571660] env[62510]: DEBUG nova.network.neutron [req-0e9a4b3e-b2f1-4ba0-b1af-573f85a82782 req-33f9c8b7-1073-4f23-b935-aa7d22b093a0 service nova] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Refreshing network info cache for port a6e31bab-0459-42fe-8756-d37cc3fa3e88 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1996.574876] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1996.575568] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1996.575683] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1996.575860] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None 
None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1996.576017] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1996.576167] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1996.576304] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62510) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 1996.576484] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1996.792867] env[62510]: DEBUG nova.scheduler.client.report [None req-213080b1-5137-4cd7-9808-5142f64739a7 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1996.812174] env[62510]: DEBUG oslo_concurrency.lockutils [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1996.936792] env[62510]: INFO nova.compute.manager [-] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Took 1.52 seconds to deallocate network for instance. 
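(A reading aid for the inventory payload that recurs in the entries above and below for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56. The short Python sketch is illustrative only, not Nova or Placement source code; it assumes the conventional Placement rule that effective capacity is (total - reserved) * allocation_ratio, with max_unit bounding any single allocation, and simply evaluates that arithmetic for the values shown in the log.)

# Illustrative sketch only (assumed convention, not Nova/Placement code):
# effective capacity of an inventory record is taken here as
#   (total - reserved) * allocation_ratio,
# while max_unit caps what one allocation may request.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'max_unit': 16,    'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'max_unit': 166,   'allocation_ratio': 1.0},
}

for resource_class, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{resource_class}: capacity={capacity:.0f}, "
          f"largest single allocation={inv['max_unit']}")

# Expected output under that assumption:
#   VCPU: capacity=192, largest single allocation=16
#   MEMORY_MB: capacity=196078, largest single allocation=65530
#   DISK_GB: capacity=400, largest single allocation=166
# Which is consistent with the log: "Inventory has not changed" reports leave the
# provider generation alone, while the earlier inventory update bumped it 166 -> 167.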
[ 1997.075200] env[62510]: DEBUG oslo_concurrency.lockutils [None req-22b39d3b-b224-4609-bd8b-8a9da5202f11 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "22002fc1-647e-4e65-a5f0-c3a34575985f" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.822s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1997.081824] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1997.300158] env[62510]: DEBUG oslo_concurrency.lockutils [None req-213080b1-5137-4cd7-9808-5142f64739a7 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.754s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1997.302170] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.089s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1997.302370] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1997.304176] env[62510]: DEBUG oslo_concurrency.lockutils [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.492s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1997.323312] env[62510]: INFO nova.scheduler.client.report [None req-213080b1-5137-4cd7-9808-5142f64739a7 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Deleted allocations for instance abf15987-86cc-4fdc-be9a-efd0448ce9ca [ 1997.330188] env[62510]: INFO nova.scheduler.client.report [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Deleted allocations for instance 14a54dac-d2b8-4618-86c8-ab2d08bae005 [ 1997.391233] env[62510]: DEBUG nova.network.neutron [req-0e9a4b3e-b2f1-4ba0-b1af-573f85a82782 req-33f9c8b7-1073-4f23-b935-aa7d22b093a0 service nova] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Updated VIF entry in instance network info cache for port a6e31bab-0459-42fe-8756-d37cc3fa3e88. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1997.391600] env[62510]: DEBUG nova.network.neutron [req-0e9a4b3e-b2f1-4ba0-b1af-573f85a82782 req-33f9c8b7-1073-4f23-b935-aa7d22b093a0 service nova] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Updating instance_info_cache with network_info: [{"id": "a6e31bab-0459-42fe-8756-d37cc3fa3e88", "address": "fa:16:3e:7d:cb:3f", "network": {"id": "9b209a99-520e-436f-be97-fe37ae505518", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1482163995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86abf24d608d4c438161dc0b8335dea1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6e31bab-04", "ovs_interfaceid": "a6e31bab-0459-42fe-8756-d37cc3fa3e88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1997.443718] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d9f4dfb1-ca23-4c45-b0ad-1f00df049a56 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1997.809501] env[62510]: INFO nova.compute.claims [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1997.830815] env[62510]: DEBUG oslo_concurrency.lockutils [None req-213080b1-5137-4cd7-9808-5142f64739a7 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Lock "abf15987-86cc-4fdc-be9a-efd0448ce9ca" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.066s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1997.836878] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2862b852-e0b8-498a-9747-e24a13a8fabd tempest-DeleteServersTestJSON-1994223681 tempest-DeleteServersTestJSON-1994223681-project-member] Lock "14a54dac-d2b8-4618-86c8-ab2d08bae005" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.011s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1997.893686] env[62510]: DEBUG oslo_concurrency.lockutils [req-0e9a4b3e-b2f1-4ba0-b1af-573f85a82782 req-33f9c8b7-1073-4f23-b935-aa7d22b093a0 service nova] Releasing lock "refresh_cache-4e735bb6-f167-4c2b-b44e-d2dd3040603d" {{(pid=62510) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1997.893944] env[62510]: DEBUG nova.compute.manager [req-0e9a4b3e-b2f1-4ba0-b1af-573f85a82782 req-33f9c8b7-1073-4f23-b935-aa7d22b093a0 service nova] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Received event network-changed-453df25e-58eb-42b3-aa0a-3771b21d6b25 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1997.894128] env[62510]: DEBUG nova.compute.manager [req-0e9a4b3e-b2f1-4ba0-b1af-573f85a82782 req-33f9c8b7-1073-4f23-b935-aa7d22b093a0 service nova] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Refreshing instance network info cache due to event network-changed-453df25e-58eb-42b3-aa0a-3771b21d6b25. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 1997.894337] env[62510]: DEBUG oslo_concurrency.lockutils [req-0e9a4b3e-b2f1-4ba0-b1af-573f85a82782 req-33f9c8b7-1073-4f23-b935-aa7d22b093a0 service nova] Acquiring lock "refresh_cache-f39e74c3-eb58-4d28-a489-73d2de1e9bef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1997.894476] env[62510]: DEBUG oslo_concurrency.lockutils [req-0e9a4b3e-b2f1-4ba0-b1af-573f85a82782 req-33f9c8b7-1073-4f23-b935-aa7d22b093a0 service nova] Acquired lock "refresh_cache-f39e74c3-eb58-4d28-a489-73d2de1e9bef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1997.894636] env[62510]: DEBUG nova.network.neutron [req-0e9a4b3e-b2f1-4ba0-b1af-573f85a82782 req-33f9c8b7-1073-4f23-b935-aa7d22b093a0 service nova] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Refreshing network info cache for port 453df25e-58eb-42b3-aa0a-3771b21d6b25 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1997.978078] env[62510]: DEBUG nova.compute.manager [req-6ca99418-efba-492c-9e4d-e09472775b16 req-f00da027-12b4-480b-858c-96bf17d8ff33 service nova] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Received event network-vif-deleted-aa1b717d-79b9-457c-829a-a4e12f0187c4 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 1998.126607] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquiring lock "31a181cd-b7cd-42c0-960d-e7d28987dc19" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1998.126877] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "31a181cd-b7cd-42c0-960d-e7d28987dc19" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1998.315037] env[62510]: INFO nova.compute.resource_tracker [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Updating resource usage from migration eb277a30-dc24-4d19-8bd9-02104d08ca83 [ 1998.425737] env[62510]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f0e05b6-0c04-4573-88da-7457f4cd3e36 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.435340] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4ee4936-02a5-4592-af67-872610f48846 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.467450] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df5ae7be-e2d2-4c23-a58e-07efbbc3a8b8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.474900] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e95e1cd1-e209-4bdf-80c0-77d24160a4b7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.488014] env[62510]: DEBUG nova.compute.provider_tree [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1998.629712] env[62510]: DEBUG nova.compute.manager [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1998.675211] env[62510]: DEBUG oslo_concurrency.lockutils [None req-74998b24-42df-4b7b-b07d-1f1fc05a8cf9 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Acquiring lock "a5a9c086-6ae2-4644-acfa-7c147593b8d2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1998.675450] env[62510]: DEBUG oslo_concurrency.lockutils [None req-74998b24-42df-4b7b-b07d-1f1fc05a8cf9 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Lock "a5a9c086-6ae2-4644-acfa-7c147593b8d2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1998.675671] env[62510]: DEBUG oslo_concurrency.lockutils [None req-74998b24-42df-4b7b-b07d-1f1fc05a8cf9 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Acquiring lock "a5a9c086-6ae2-4644-acfa-7c147593b8d2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1998.675870] env[62510]: DEBUG oslo_concurrency.lockutils [None req-74998b24-42df-4b7b-b07d-1f1fc05a8cf9 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Lock "a5a9c086-6ae2-4644-acfa-7c147593b8d2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1998.676054] env[62510]: DEBUG oslo_concurrency.lockutils [None req-74998b24-42df-4b7b-b07d-1f1fc05a8cf9 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Lock "a5a9c086-6ae2-4644-acfa-7c147593b8d2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1998.678201] env[62510]: INFO nova.compute.manager [None req-74998b24-42df-4b7b-b07d-1f1fc05a8cf9 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Terminating instance [ 1998.710925] env[62510]: DEBUG nova.network.neutron [req-0e9a4b3e-b2f1-4ba0-b1af-573f85a82782 req-33f9c8b7-1073-4f23-b935-aa7d22b093a0 service nova] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Updated VIF entry in instance network info cache for port 453df25e-58eb-42b3-aa0a-3771b21d6b25. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1998.711287] env[62510]: DEBUG nova.network.neutron [req-0e9a4b3e-b2f1-4ba0-b1af-573f85a82782 req-33f9c8b7-1073-4f23-b935-aa7d22b093a0 service nova] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Updating instance_info_cache with network_info: [{"id": "453df25e-58eb-42b3-aa0a-3771b21d6b25", "address": "fa:16:3e:5b:92:84", "network": {"id": "9b209a99-520e-436f-be97-fe37ae505518", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1482163995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86abf24d608d4c438161dc0b8335dea1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap453df25e-58", "ovs_interfaceid": "453df25e-58eb-42b3-aa0a-3771b21d6b25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1998.991598] env[62510]: DEBUG nova.scheduler.client.report [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1999.151433] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1999.182264] env[62510]: DEBUG nova.compute.manager [None req-74998b24-42df-4b7b-b07d-1f1fc05a8cf9 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1999.182513] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-74998b24-42df-4b7b-b07d-1f1fc05a8cf9 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1999.183426] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-916e3a0a-edd8-47ca-aba5-7d69d72c8856 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.191087] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-74998b24-42df-4b7b-b07d-1f1fc05a8cf9 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1999.191313] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-05708657-66b0-4481-b9a1-191782416f59 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.198460] env[62510]: DEBUG oslo_vmware.api [None req-74998b24-42df-4b7b-b07d-1f1fc05a8cf9 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for the task: (returnval){ [ 1999.198460] env[62510]: value = "task-1769736" [ 1999.198460] env[62510]: _type = "Task" [ 1999.198460] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1999.206413] env[62510]: DEBUG oslo_vmware.api [None req-74998b24-42df-4b7b-b07d-1f1fc05a8cf9 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769736, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1999.214077] env[62510]: DEBUG oslo_concurrency.lockutils [req-0e9a4b3e-b2f1-4ba0-b1af-573f85a82782 req-33f9c8b7-1073-4f23-b935-aa7d22b093a0 service nova] Releasing lock "refresh_cache-f39e74c3-eb58-4d28-a489-73d2de1e9bef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1999.461352] env[62510]: DEBUG nova.compute.manager [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Stashing vm_state: active {{(pid=62510) _prep_resize /opt/stack/nova/nova/compute/manager.py:5998}} [ 1999.496814] env[62510]: DEBUG oslo_concurrency.lockutils [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.192s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1999.497825] env[62510]: INFO nova.compute.manager [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Migrating [ 1999.505071] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 2.423s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1999.505264] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1999.505460] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62510) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1999.505760] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d9f4dfb1-ca23-4c45-b0ad-1f00df049a56 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.062s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1999.505958] env[62510]: DEBUG nova.objects.instance [None req-d9f4dfb1-ca23-4c45-b0ad-1f00df049a56 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lazy-loading 'resources' on Instance uuid 9956e5d2-edda-47af-a3df-743ebed1154b {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1999.510235] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf1e222a-5d40-4265-b3b3-28fce4026b98 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1999.526317] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98213707-8f2f-4d45-b0c2-d622998d5231 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.543924] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dc69a22-3315-4166-b267-aabb228096cd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.550795] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ef77383-01f0-4b79-8423-f8c05cf33656 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.583924] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179437MB free_disk=166GB free_vcpus=48 pci_devices=None {{(pid=62510) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1999.584111] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1999.704032] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b50421ff-e7ff-4871-ba0b-9a82dc5cdf17 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.712018] env[62510]: DEBUG oslo_vmware.api [None req-74998b24-42df-4b7b-b07d-1f1fc05a8cf9 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769736, 'name': PowerOffVM_Task, 'duration_secs': 0.469366} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1999.713831] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-74998b24-42df-4b7b-b07d-1f1fc05a8cf9 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1999.714038] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-74998b24-42df-4b7b-b07d-1f1fc05a8cf9 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1999.714324] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-afe1926a-976e-48ac-a975-b0d9788c6452 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.716490] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67631607-6397-4ebe-9110-4cae15e5b6bf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.747732] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-531986ea-2e51-4042-9b44-dded22c43aad {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.755498] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20b9669c-a521-49a1-b633-e31ed487e05e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.768904] env[62510]: DEBUG nova.compute.provider_tree [None req-d9f4dfb1-ca23-4c45-b0ad-1f00df049a56 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1999.827817] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-74998b24-42df-4b7b-b07d-1f1fc05a8cf9 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1999.828045] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-74998b24-42df-4b7b-b07d-1f1fc05a8cf9 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1999.828234] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-74998b24-42df-4b7b-b07d-1f1fc05a8cf9 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Deleting the datastore file [datastore1] a5a9c086-6ae2-4644-acfa-7c147593b8d2 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1999.828562] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-938f31a6-e428-47cb-b08a-cebbbf9800e9 
{{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.834739] env[62510]: DEBUG oslo_vmware.api [None req-74998b24-42df-4b7b-b07d-1f1fc05a8cf9 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for the task: (returnval){ [ 1999.834739] env[62510]: value = "task-1769738" [ 1999.834739] env[62510]: _type = "Task" [ 1999.834739] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1999.842562] env[62510]: DEBUG oslo_vmware.api [None req-74998b24-42df-4b7b-b07d-1f1fc05a8cf9 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769738, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1999.977991] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2000.022165] env[62510]: DEBUG oslo_concurrency.lockutils [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "refresh_cache-f39e74c3-eb58-4d28-a489-73d2de1e9bef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2000.022378] env[62510]: DEBUG oslo_concurrency.lockutils [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquired lock "refresh_cache-f39e74c3-eb58-4d28-a489-73d2de1e9bef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2000.022548] env[62510]: DEBUG nova.network.neutron [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2000.272946] env[62510]: DEBUG nova.scheduler.client.report [None req-d9f4dfb1-ca23-4c45-b0ad-1f00df049a56 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2000.347860] env[62510]: DEBUG oslo_vmware.api [None req-74998b24-42df-4b7b-b07d-1f1fc05a8cf9 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Task: {'id': task-1769738, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.346319} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2000.348163] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-74998b24-42df-4b7b-b07d-1f1fc05a8cf9 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2000.348352] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-74998b24-42df-4b7b-b07d-1f1fc05a8cf9 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2000.348571] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-74998b24-42df-4b7b-b07d-1f1fc05a8cf9 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2000.348757] env[62510]: INFO nova.compute.manager [None req-74998b24-42df-4b7b-b07d-1f1fc05a8cf9 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Took 1.17 seconds to destroy the instance on the hypervisor. [ 2000.349011] env[62510]: DEBUG oslo.service.loopingcall [None req-74998b24-42df-4b7b-b07d-1f1fc05a8cf9 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2000.349217] env[62510]: DEBUG nova.compute.manager [-] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2000.349315] env[62510]: DEBUG nova.network.neutron [-] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2000.690158] env[62510]: DEBUG nova.compute.manager [req-9900f209-7c6c-43fb-a50c-a112e829b3f8 req-0cc5a32e-43f4-4f45-a5d2-8a4ba9947952 service nova] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Received event network-vif-deleted-54c12d32-3e2e-4ec3-a6a0-de7c5219efcc {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2000.690158] env[62510]: INFO nova.compute.manager [req-9900f209-7c6c-43fb-a50c-a112e829b3f8 req-0cc5a32e-43f4-4f45-a5d2-8a4ba9947952 service nova] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Neutron deleted interface 54c12d32-3e2e-4ec3-a6a0-de7c5219efcc; detaching it from the instance and deleting it from the info cache [ 2000.690158] env[62510]: DEBUG nova.network.neutron [req-9900f209-7c6c-43fb-a50c-a112e829b3f8 req-0cc5a32e-43f4-4f45-a5d2-8a4ba9947952 service nova] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2000.778485] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d9f4dfb1-ca23-4c45-b0ad-1f00df049a56 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] 
Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.273s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2000.780812] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.629s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2000.783484] env[62510]: INFO nova.compute.claims [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2000.804036] env[62510]: INFO nova.scheduler.client.report [None req-d9f4dfb1-ca23-4c45-b0ad-1f00df049a56 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Deleted allocations for instance 9956e5d2-edda-47af-a3df-743ebed1154b [ 2000.934635] env[62510]: DEBUG nova.network.neutron [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Updating instance_info_cache with network_info: [{"id": "453df25e-58eb-42b3-aa0a-3771b21d6b25", "address": "fa:16:3e:5b:92:84", "network": {"id": "9b209a99-520e-436f-be97-fe37ae505518", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1482163995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86abf24d608d4c438161dc0b8335dea1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap453df25e-58", "ovs_interfaceid": "453df25e-58eb-42b3-aa0a-3771b21d6b25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2001.121747] env[62510]: DEBUG nova.network.neutron [-] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2001.192057] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c5bbd3a1-eca0-4d8c-b930-4af3a7ee33a7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.201268] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a4ebbcae-6973-4f2a-b1fc-02d8ee1ecbd6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.228940] env[62510]: DEBUG nova.compute.manager [req-9900f209-7c6c-43fb-a50c-a112e829b3f8 req-0cc5a32e-43f4-4f45-a5d2-8a4ba9947952 service nova] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Detach interface failed, port_id=54c12d32-3e2e-4ec3-a6a0-de7c5219efcc, reason: Instance a5a9c086-6ae2-4644-acfa-7c147593b8d2 could not be found. {{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 2001.310674] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d9f4dfb1-ca23-4c45-b0ad-1f00df049a56 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "9956e5d2-edda-47af-a3df-743ebed1154b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.562s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2001.437260] env[62510]: DEBUG oslo_concurrency.lockutils [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Releasing lock "refresh_cache-f39e74c3-eb58-4d28-a489-73d2de1e9bef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2001.625075] env[62510]: INFO nova.compute.manager [-] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Took 1.27 seconds to deallocate network for instance. [ 2001.949136] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8796924a-a233-4770-a992-9d22e2250e5f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.954161] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4172ae45-8ebd-438e-bcfb-8dbd0335096b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.989319] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22ef91c0-43e9-4d9f-b101-093b06cc94f9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.997267] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f512b59-1e32-4125-b7f7-473ba37b6357 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.011673] env[62510]: DEBUG nova.compute.provider_tree [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2002.132141] env[62510]: DEBUG oslo_concurrency.lockutils [None req-74998b24-42df-4b7b-b07d-1f1fc05a8cf9 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2002.515126] env[62510]: DEBUG nova.scheduler.client.report 
[None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2002.956807] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a73e4025-ef1e-4c20-9abb-75abfff65584 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.976451] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Updating instance 'f39e74c3-eb58-4d28-a489-73d2de1e9bef' progress to 0 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2003.019993] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.239s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2003.020545] env[62510]: DEBUG nova.compute.manager [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Start building networks asynchronously for instance. 
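[Annotation] The report-client entry above repeats the inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56: per-class totals, reserved amounts and allocation ratios. As a quick outside illustration (not Nova or placement code), the sketch below applies placement's documented capacity rule, capacity = (total - reserved) * allocation_ratio, to the values in the log.

    # Illustrative only: derive schedulable capacity from the logged inventory.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0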
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2003.023846] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 3.440s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2003.047835] env[62510]: DEBUG nova.compute.manager [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Stashing vm_state: active {{(pid=62510) _prep_resize /opt/stack/nova/nova/compute/manager.py:5998}} [ 2003.482489] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2003.482730] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fb61da12-83c1-4487-b65d-298ee99c319f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.490071] env[62510]: DEBUG oslo_vmware.api [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 2003.490071] env[62510]: value = "task-1769739" [ 2003.490071] env[62510]: _type = "Task" [ 2003.490071] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2003.498977] env[62510]: DEBUG oslo_vmware.api [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769739, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2003.527826] env[62510]: DEBUG nova.compute.utils [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2003.533440] env[62510]: DEBUG nova.compute.manager [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2003.533608] env[62510]: DEBUG nova.network.neutron [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2003.569300] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2003.574070] env[62510]: DEBUG nova.policy [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '93399cd69f4245188fd39bde29ee3d5a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '11c021c6b45c452f83732fe578e576f6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 2003.846675] env[62510]: DEBUG nova.network.neutron [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Successfully created port: 6ae6f8a4-f91b-4f3f-b94e-a75ba935075c {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2004.000653] env[62510]: DEBUG oslo_vmware.api [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769739, 'name': PowerOffVM_Task, 'duration_secs': 0.210363} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2004.000938] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2004.001146] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Updating instance 'f39e74c3-eb58-4d28-a489-73d2de1e9bef' progress to 17 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2004.037098] env[62510]: DEBUG nova.compute.manager [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Start building block device mappings for instance. 
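[Annotation] The nova.policy entry above records a failed check of network:attach_external_network against credentials carrying only the reader/member roles. A minimal, self-contained sketch of that kind of oslo.policy check follows; the rule string and credentials are illustrative stand-ins, not Nova's registered defaults.

    from oslo_config import cfg
    from oslo_policy import policy

    CONF = cfg.CONF
    enforcer = policy.Enforcer(CONF)
    # Assumed rule text for illustration only.
    enforcer.register_default(
        policy.RuleDefault('network:attach_external_network', 'role:admin'))

    creds = {'roles': ['reader', 'member'],
             'project_id': '11c021c6b45c452f83732fe578e576f6'}
    allowed = enforcer.enforce('network:attach_external_network', {}, creds)
    print(allowed)  # False -> the caller treats external networks as off limits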
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2004.040721] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Applying migration context for instance f39e74c3-eb58-4d28-a489-73d2de1e9bef as it has an incoming, in-progress migration eb277a30-dc24-4d19-8bd9-02104d08ca83. Migration status is migrating {{(pid=62510) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 2004.042980] env[62510]: INFO nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Updating resource usage from migration 1526651e-ada9-4a8e-a6a6-c130f982d0a2 [ 2004.042980] env[62510]: INFO nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Updating resource usage from migration eb277a30-dc24-4d19-8bd9-02104d08ca83 [ 2004.042980] env[62510]: INFO nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Updating resource usage from migration cd18239d-4bf8-4b7c-9fc5-a02ec9c2ec54 [ 2004.061792] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 4e735bb6-f167-4c2b-b44e-d2dd3040603d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2004.061912] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 92cb4e54-a00e-4974-b134-22d302932e32 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2004.062383] env[62510]: WARNING nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance a5a9c086-6ae2-4644-acfa-7c147593b8d2 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 2004.062383] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance fe3b3380-69bb-4563-abf2-9f0db439d31a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2004.062383] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Migration eb277a30-dc24-4d19-8bd9-02104d08ca83 is active on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 2004.062383] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance f39e74c3-eb58-4d28-a489-73d2de1e9bef actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2004.062582] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Migration 1526651e-ada9-4a8e-a6a6-c130f982d0a2 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 2004.062618] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 31a181cd-b7cd-42c0-960d-e7d28987dc19 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2004.062719] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 22002fc1-647e-4e65-a5f0-c3a34575985f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2004.062835] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Migration cd18239d-4bf8-4b7c-9fc5-a02ec9c2ec54 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 2004.062985] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2004.063198] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2004.063335] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2112MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2004.192680] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-766fb10e-2e04-45b5-9fd7-d99883b06925 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.200265] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42702a3c-c18a-4492-925c-f7de697a319b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.233457] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d359a70-6df0-4f92-95f7-7c4e8e06243d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.241498] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8b32be2-61d8-4dce-9d6b-a2daf2009ea8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.255408] env[62510]: DEBUG nova.compute.provider_tree [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2004.508864] env[62510]: DEBUG nova.virt.hardware [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:41Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2004.508864] env[62510]: DEBUG nova.virt.hardware [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2004.508864] env[62510]: DEBUG nova.virt.hardware [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Image limits 0:0:0 
{{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2004.508864] env[62510]: DEBUG nova.virt.hardware [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2004.509424] env[62510]: DEBUG nova.virt.hardware [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2004.509424] env[62510]: DEBUG nova.virt.hardware [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2004.509424] env[62510]: DEBUG nova.virt.hardware [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2004.509627] env[62510]: DEBUG nova.virt.hardware [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2004.509663] env[62510]: DEBUG nova.virt.hardware [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2004.509808] env[62510]: DEBUG nova.virt.hardware [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2004.509976] env[62510]: DEBUG nova.virt.hardware [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2004.514992] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73dd2b7b-6584-432f-a706-8e51598936a5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.532298] env[62510]: DEBUG oslo_vmware.api [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 2004.532298] env[62510]: value = "task-1769740" [ 2004.532298] env[62510]: _type = "Task" [ 2004.532298] env[62510]: } to complete. 
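[Annotation] The nova.virt.hardware entries above go from flavor/image limits (effectively unlimited at 65536 sockets/cores/threads) to "Got 1 possible topologies" for a single vCPU. The function below is a hypothetical reimplementation of that enumeration step, not the hardware.py code: it lists every sockets*cores*threads factorisation of the vCPU count within the maximums.

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        topos = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            rem = vcpus // sockets
            for cores in range(1, min(rem, max_cores) + 1):
                if rem % cores:
                    continue
                threads = rem // cores
                if threads <= max_threads:
                    topos.append((sockets, cores, threads))
        return topos

    print(possible_topologies(1))  # [(1, 1, 1)] -- matches "Got 1 possible topologies"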
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2004.540478] env[62510]: DEBUG oslo_vmware.api [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769740, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2004.758433] env[62510]: DEBUG nova.scheduler.client.report [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2005.042566] env[62510]: DEBUG oslo_vmware.api [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769740, 'name': ReconfigVM_Task, 'duration_secs': 0.170318} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2005.042873] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Updating instance 'f39e74c3-eb58-4d28-a489-73d2de1e9bef' progress to 33 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2005.050676] env[62510]: DEBUG nova.compute.manager [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2005.076536] env[62510]: DEBUG nova.virt.hardware [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2005.076788] env[62510]: DEBUG nova.virt.hardware [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2005.076944] env[62510]: DEBUG nova.virt.hardware [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2005.077139] env[62510]: DEBUG nova.virt.hardware [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2005.077287] env[62510]: DEBUG nova.virt.hardware [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2005.077435] env[62510]: DEBUG nova.virt.hardware [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2005.077645] env[62510]: DEBUG nova.virt.hardware [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2005.077813] env[62510]: DEBUG nova.virt.hardware [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2005.077996] env[62510]: DEBUG 
nova.virt.hardware [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2005.078188] env[62510]: DEBUG nova.virt.hardware [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2005.078394] env[62510]: DEBUG nova.virt.hardware [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2005.079349] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12709503-470e-4768-8826-962fd124ce4a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.087439] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e54d2509-5e1a-4e7c-8dfb-ce82a6e34e91 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.232532] env[62510]: DEBUG nova.compute.manager [req-d6139729-f94c-47cf-8ead-a7d34a51e690 req-65765df2-7f70-43c6-9bbc-922e2d638590 service nova] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Received event network-vif-plugged-6ae6f8a4-f91b-4f3f-b94e-a75ba935075c {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2005.232753] env[62510]: DEBUG oslo_concurrency.lockutils [req-d6139729-f94c-47cf-8ead-a7d34a51e690 req-65765df2-7f70-43c6-9bbc-922e2d638590 service nova] Acquiring lock "31a181cd-b7cd-42c0-960d-e7d28987dc19-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2005.232956] env[62510]: DEBUG oslo_concurrency.lockutils [req-d6139729-f94c-47cf-8ead-a7d34a51e690 req-65765df2-7f70-43c6-9bbc-922e2d638590 service nova] Lock "31a181cd-b7cd-42c0-960d-e7d28987dc19-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2005.233137] env[62510]: DEBUG oslo_concurrency.lockutils [req-d6139729-f94c-47cf-8ead-a7d34a51e690 req-65765df2-7f70-43c6-9bbc-922e2d638590 service nova] Lock "31a181cd-b7cd-42c0-960d-e7d28987dc19-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2005.233302] env[62510]: DEBUG nova.compute.manager [req-d6139729-f94c-47cf-8ead-a7d34a51e690 req-65765df2-7f70-43c6-9bbc-922e2d638590 service nova] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] No waiting events found dispatching network-vif-plugged-6ae6f8a4-f91b-4f3f-b94e-a75ba935075c {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2005.233468] env[62510]: WARNING nova.compute.manager 
[req-d6139729-f94c-47cf-8ead-a7d34a51e690 req-65765df2-7f70-43c6-9bbc-922e2d638590 service nova] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Received unexpected event network-vif-plugged-6ae6f8a4-f91b-4f3f-b94e-a75ba935075c for instance with vm_state building and task_state spawning. [ 2005.263603] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62510) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2005.263820] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.240s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2005.264092] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 5.286s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2005.330278] env[62510]: DEBUG nova.network.neutron [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Successfully updated port: 6ae6f8a4-f91b-4f3f-b94e-a75ba935075c {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2005.549853] env[62510]: DEBUG nova.virt.hardware [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2005.550250] env[62510]: DEBUG nova.virt.hardware [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2005.550250] env[62510]: DEBUG nova.virt.hardware [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2005.550442] env[62510]: DEBUG nova.virt.hardware [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} 
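[Annotation] The entries above show the external-event path for network-vif-plugged-6ae6f8a4-f91b-4f3f-b94e-a75ba935075c: the handler takes the per-instance "-events" lock, finds no registered waiter ("No waiting events found"), and logs the event as unexpected because the spawn has not started waiting yet. Below is a minimal sketch of that register/dispatch handshake using plain threading and invented helper names; it illustrates the pattern, not Nova's InstanceEvents code.

    import threading

    _waiters = {}          # (instance_uuid, event_name) -> threading.Event
    _waiters_lock = threading.Lock()

    def prepare_for_event(instance_uuid, event_name):
        ev = threading.Event()
        with _waiters_lock:
            _waiters[(instance_uuid, event_name)] = ev
        return ev

    def dispatch_external_event(instance_uuid, event_name):
        with _waiters_lock:
            ev = _waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            print("No waiting events found dispatching %s" % event_name)
            return False
        ev.set()
        return True

    # Typical ordering: the spawn path registers first, then waits.
    waiter = prepare_for_event('31a181cd-b7cd-42c0-960d-e7d28987dc19', 'network-vif-plugged')
    dispatch_external_event('31a181cd-b7cd-42c0-960d-e7d28987dc19', 'network-vif-plugged')
    waiter.wait(timeout=1)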
[ 2005.550590] env[62510]: DEBUG nova.virt.hardware [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2005.550740] env[62510]: DEBUG nova.virt.hardware [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2005.550968] env[62510]: DEBUG nova.virt.hardware [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2005.551156] env[62510]: DEBUG nova.virt.hardware [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2005.551329] env[62510]: DEBUG nova.virt.hardware [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2005.551494] env[62510]: DEBUG nova.virt.hardware [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2005.551672] env[62510]: DEBUG nova.virt.hardware [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2005.557216] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Reconfiguring VM instance instance-00000077 to detach disk 2000 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2005.557511] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3ee45e72-27a7-4b75-840e-cdf57f92b76c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.577530] env[62510]: DEBUG oslo_vmware.api [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 2005.577530] env[62510]: value = "task-1769741" [ 2005.577530] env[62510]: _type = "Task" [ 2005.577530] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2005.585297] env[62510]: DEBUG oslo_vmware.api [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769741, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2005.769432] env[62510]: INFO nova.compute.claims [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2005.832749] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquiring lock "refresh_cache-31a181cd-b7cd-42c0-960d-e7d28987dc19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2005.832749] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquired lock "refresh_cache-31a181cd-b7cd-42c0-960d-e7d28987dc19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2005.832749] env[62510]: DEBUG nova.network.neutron [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2006.088175] env[62510]: DEBUG oslo_vmware.api [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769741, 'name': ReconfigVM_Task, 'duration_secs': 0.155807} completed successfully. 
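[Annotation] Most of the vCenter traffic in this section has the same shape: invoke a *_Task SOAP method (PowerOffVM_Task, ReconfigVM_Task), then poll it via wait_for_task until it reports "completed successfully" with a duration. A compressed sketch of that pattern with oslo.vmware follows; the host, credentials and vm_ref lookup are placeholders, so treat it as an outline rather than driver code.

    from oslo_vmware import api as vmware_api

    # Placeholder connection details -- not this deployment's values.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'user', 'password',
        api_retry_count=10, task_poll_interval=0.5)

    # vm_ref would normally come from a lookup such as FindAllByUuid or a
    # PropertyCollector.RetrievePropertiesEx query like those in the log.
    vm_ref = ...  # managed object reference of the VM

    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)  # polls the task and raises if it errors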
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2006.088468] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Reconfigured VM instance instance-00000077 to detach disk 2000 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2006.089289] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb16dc9d-f3e0-47e1-9a25-ce67f10f259a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.111495] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] volume-5f4e1cdd-b5a6-4d73-9faf-1b45c89d6de8/volume-5f4e1cdd-b5a6-4d73-9faf-1b45c89d6de8.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2006.111753] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-14ec6578-a09b-4e43-9fa3-37f107a78fb4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.129948] env[62510]: DEBUG oslo_vmware.api [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 2006.129948] env[62510]: value = "task-1769742" [ 2006.129948] env[62510]: _type = "Task" [ 2006.129948] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2006.137841] env[62510]: DEBUG oslo_vmware.api [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769742, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2006.276157] env[62510]: INFO nova.compute.resource_tracker [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Updating resource usage from migration 1526651e-ada9-4a8e-a6a6-c130f982d0a2 [ 2006.363924] env[62510]: DEBUG nova.network.neutron [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Instance cache missing network info. 
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2006.424399] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bdcdcd6-fee7-4e91-a0ff-e95ba092053e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.434291] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-729a8841-ec6f-4857-a696-d32ce6910d33 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.467133] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7729f0af-ece1-41a6-a0ed-cdcc1c589b39 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.475050] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77a47c49-2f87-4b3e-a1f9-a83c7d55d172 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.489089] env[62510]: DEBUG nova.compute.provider_tree [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2006.502348] env[62510]: DEBUG nova.network.neutron [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Updating instance_info_cache with network_info: [{"id": "6ae6f8a4-f91b-4f3f-b94e-a75ba935075c", "address": "fa:16:3e:34:8d:a7", "network": {"id": "e420cc26-6a46-4189-b24c-78c39b6b4d50", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-234097015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11c021c6b45c452f83732fe578e576f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6eb7e3e9-5cc2-40f1-a6eb-f70f06531667", "external-id": "nsx-vlan-transportzone-938", "segmentation_id": 938, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ae6f8a4-f9", "ovs_interfaceid": "6ae6f8a4-f91b-4f3f-b94e-a75ba935075c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2006.555261] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cf1e827a-f712-4a8e-ae84-3f747b1bedfb tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Acquiring lock "92cb4e54-a00e-4974-b134-22d302932e32" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2006.555595] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cf1e827a-f712-4a8e-ae84-3f747b1bedfb tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lock "92cb4e54-a00e-4974-b134-22d302932e32" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2006.555729] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cf1e827a-f712-4a8e-ae84-3f747b1bedfb tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Acquiring lock "92cb4e54-a00e-4974-b134-22d302932e32-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2006.555906] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cf1e827a-f712-4a8e-ae84-3f747b1bedfb tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lock "92cb4e54-a00e-4974-b134-22d302932e32-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2006.556089] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cf1e827a-f712-4a8e-ae84-3f747b1bedfb tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lock "92cb4e54-a00e-4974-b134-22d302932e32-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2006.558146] env[62510]: INFO nova.compute.manager [None req-cf1e827a-f712-4a8e-ae84-3f747b1bedfb tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Terminating instance [ 2006.640185] env[62510]: DEBUG oslo_vmware.api [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769742, 'name': ReconfigVM_Task, 'duration_secs': 0.24302} completed successfully. 
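[Annotation] The "Acquiring lock / acquired by / released by ... held N.NNNs" messages that dominate this section (compute_resources, the per-instance 92cb4e54... lock, the "-events" locks) are emitted by oslo.concurrency's lockutils wrapper. Below is a minimal sketch of the decorator form that produces them; the function name, body and the fair=True choice are placeholders, not resource-tracker code.

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources', fair=True)
    def update_usage(instance):
        # Critical section: while the lock is held, usage/claim state can be
        # mutated; the wrapper logs how long callers waited and held the lock.
        pass

    update_usage(None)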
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2006.640453] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Reconfigured VM instance instance-00000077 to attach disk [datastore1] volume-5f4e1cdd-b5a6-4d73-9faf-1b45c89d6de8/volume-5f4e1cdd-b5a6-4d73-9faf-1b45c89d6de8.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2006.640727] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Updating instance 'f39e74c3-eb58-4d28-a489-73d2de1e9bef' progress to 50 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2006.991937] env[62510]: DEBUG nova.scheduler.client.report [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2007.004233] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Releasing lock "refresh_cache-31a181cd-b7cd-42c0-960d-e7d28987dc19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2007.004505] env[62510]: DEBUG nova.compute.manager [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Instance network_info: |[{"id": "6ae6f8a4-f91b-4f3f-b94e-a75ba935075c", "address": "fa:16:3e:34:8d:a7", "network": {"id": "e420cc26-6a46-4189-b24c-78c39b6b4d50", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-234097015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11c021c6b45c452f83732fe578e576f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6eb7e3e9-5cc2-40f1-a6eb-f70f06531667", "external-id": "nsx-vlan-transportzone-938", "segmentation_id": 938, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ae6f8a4-f9", "ovs_interfaceid": "6ae6f8a4-f91b-4f3f-b94e-a75ba935075c", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2007.005405] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:34:8d:a7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6eb7e3e9-5cc2-40f1-a6eb-f70f06531667', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6ae6f8a4-f91b-4f3f-b94e-a75ba935075c', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2007.012936] env[62510]: DEBUG oslo.service.loopingcall [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2007.013431] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2007.013664] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d71eacac-d0d0-49b2-a302-7852f567eb8b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.033865] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2007.033865] env[62510]: value = "task-1769743" [ 2007.033865] env[62510]: _type = "Task" [ 2007.033865] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2007.041726] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769743, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2007.061755] env[62510]: DEBUG nova.compute.manager [None req-cf1e827a-f712-4a8e-ae84-3f747b1bedfb tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2007.061946] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-cf1e827a-f712-4a8e-ae84-3f747b1bedfb tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2007.062684] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f942149e-800a-4462-82cb-b0b0213f83c9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.069270] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf1e827a-f712-4a8e-ae84-3f747b1bedfb tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2007.069501] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-47f6f99f-f905-47e6-a68e-12192ec750f6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.075051] env[62510]: DEBUG oslo_vmware.api [None req-cf1e827a-f712-4a8e-ae84-3f747b1bedfb tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Waiting for the task: (returnval){ [ 2007.075051] env[62510]: value = "task-1769744" [ 2007.075051] env[62510]: _type = "Task" [ 2007.075051] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2007.084308] env[62510]: DEBUG oslo_vmware.api [None req-cf1e827a-f712-4a8e-ae84-3f747b1bedfb tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769744, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2007.147202] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a43f9388-0e42-4fb6-adfe-14427f92cd1e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.167629] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97c88225-4761-4d25-b7d1-b6350c79c7f8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.188140] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Updating instance 'f39e74c3-eb58-4d28-a489-73d2de1e9bef' progress to 67 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2007.367450] env[62510]: DEBUG nova.compute.manager [req-bd77f1e5-135c-47a7-9e37-af2faf67ea69 req-41c88679-629d-4e78-8783-0a4d846a3e24 service nova] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Received event network-changed-6ae6f8a4-f91b-4f3f-b94e-a75ba935075c {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2007.367628] env[62510]: DEBUG nova.compute.manager [req-bd77f1e5-135c-47a7-9e37-af2faf67ea69 req-41c88679-629d-4e78-8783-0a4d846a3e24 service nova] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Refreshing instance network info cache due to event network-changed-6ae6f8a4-f91b-4f3f-b94e-a75ba935075c. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 2007.367892] env[62510]: DEBUG oslo_concurrency.lockutils [req-bd77f1e5-135c-47a7-9e37-af2faf67ea69 req-41c88679-629d-4e78-8783-0a4d846a3e24 service nova] Acquiring lock "refresh_cache-31a181cd-b7cd-42c0-960d-e7d28987dc19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2007.368091] env[62510]: DEBUG oslo_concurrency.lockutils [req-bd77f1e5-135c-47a7-9e37-af2faf67ea69 req-41c88679-629d-4e78-8783-0a4d846a3e24 service nova] Acquired lock "refresh_cache-31a181cd-b7cd-42c0-960d-e7d28987dc19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2007.368292] env[62510]: DEBUG nova.network.neutron [req-bd77f1e5-135c-47a7-9e37-af2faf67ea69 req-41c88679-629d-4e78-8783-0a4d846a3e24 service nova] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Refreshing network info cache for port 6ae6f8a4-f91b-4f3f-b94e-a75ba935075c {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2007.497358] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.233s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2007.497523] env[62510]: INFO nova.compute.manager [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Migrating [ 2007.504102] env[62510]: DEBUG 
oslo_concurrency.lockutils [None req-74998b24-42df-4b7b-b07d-1f1fc05a8cf9 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.373s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2007.504296] env[62510]: DEBUG oslo_concurrency.lockutils [None req-74998b24-42df-4b7b-b07d-1f1fc05a8cf9 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2007.506294] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 3.937s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2007.526677] env[62510]: INFO nova.scheduler.client.report [None req-74998b24-42df-4b7b-b07d-1f1fc05a8cf9 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Deleted allocations for instance a5a9c086-6ae2-4644-acfa-7c147593b8d2 [ 2007.544308] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769743, 'name': CreateVM_Task, 'duration_secs': 0.437446} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2007.544369] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2007.551541] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2007.551728] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2007.552224] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2007.552506] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64559862-ab33-4190-ba49-a9196d321161 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.557376] env[62510]: DEBUG oslo_vmware.api [None 
req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 2007.557376] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52820a0e-df17-f1cf-4a2e-ab0981f8f1fc" [ 2007.557376] env[62510]: _type = "Task" [ 2007.557376] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2007.565444] env[62510]: DEBUG oslo_vmware.api [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52820a0e-df17-f1cf-4a2e-ab0981f8f1fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2007.583858] env[62510]: DEBUG oslo_vmware.api [None req-cf1e827a-f712-4a8e-ae84-3f747b1bedfb tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769744, 'name': PowerOffVM_Task, 'duration_secs': 0.209014} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2007.584131] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf1e827a-f712-4a8e-ae84-3f747b1bedfb tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2007.584310] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-cf1e827a-f712-4a8e-ae84-3f747b1bedfb tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2007.584575] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b0b9faf5-ddf8-45d6-8099-d0d965e414ed {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.753632] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-cf1e827a-f712-4a8e-ae84-3f747b1bedfb tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2007.753866] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-cf1e827a-f712-4a8e-ae84-3f747b1bedfb tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2007.754059] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf1e827a-f712-4a8e-ae84-3f747b1bedfb tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Deleting the datastore file [datastore1] 92cb4e54-a00e-4974-b134-22d302932e32 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2007.754311] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a08b0756-015a-43ba-90e3-affa8675d424 
{{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.759990] env[62510]: DEBUG oslo_vmware.api [None req-cf1e827a-f712-4a8e-ae84-3f747b1bedfb tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Waiting for the task: (returnval){ [ 2007.759990] env[62510]: value = "task-1769746" [ 2007.759990] env[62510]: _type = "Task" [ 2007.759990] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2007.767528] env[62510]: DEBUG oslo_vmware.api [None req-cf1e827a-f712-4a8e-ae84-3f747b1bedfb tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769746, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2008.012576] env[62510]: INFO nova.compute.claims [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2008.016046] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "refresh_cache-22002fc1-647e-4e65-a5f0-c3a34575985f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2008.016222] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquired lock "refresh_cache-22002fc1-647e-4e65-a5f0-c3a34575985f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2008.016446] env[62510]: DEBUG nova.network.neutron [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2008.032942] env[62510]: DEBUG oslo_concurrency.lockutils [None req-74998b24-42df-4b7b-b07d-1f1fc05a8cf9 tempest-ServerRescueTestJSON-1013553629 tempest-ServerRescueTestJSON-1013553629-project-member] Lock "a5a9c086-6ae2-4644-acfa-7c147593b8d2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.357s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2008.056895] env[62510]: DEBUG nova.network.neutron [req-bd77f1e5-135c-47a7-9e37-af2faf67ea69 req-41c88679-629d-4e78-8783-0a4d846a3e24 service nova] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Updated VIF entry in instance network info cache for port 6ae6f8a4-f91b-4f3f-b94e-a75ba935075c. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2008.057245] env[62510]: DEBUG nova.network.neutron [req-bd77f1e5-135c-47a7-9e37-af2faf67ea69 req-41c88679-629d-4e78-8783-0a4d846a3e24 service nova] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Updating instance_info_cache with network_info: [{"id": "6ae6f8a4-f91b-4f3f-b94e-a75ba935075c", "address": "fa:16:3e:34:8d:a7", "network": {"id": "e420cc26-6a46-4189-b24c-78c39b6b4d50", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-234097015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11c021c6b45c452f83732fe578e576f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6eb7e3e9-5cc2-40f1-a6eb-f70f06531667", "external-id": "nsx-vlan-transportzone-938", "segmentation_id": 938, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ae6f8a4-f9", "ovs_interfaceid": "6ae6f8a4-f91b-4f3f-b94e-a75ba935075c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2008.070429] env[62510]: DEBUG oslo_vmware.api [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52820a0e-df17-f1cf-4a2e-ab0981f8f1fc, 'name': SearchDatastore_Task, 'duration_secs': 0.009643} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2008.070692] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2008.071492] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2008.071492] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2008.071492] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2008.071764] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2008.071764] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0738dc94-4a4a-486b-9231-744407c25cf7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.080727] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2008.080955] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2008.081690] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e793a66-cd82-4241-8522-6542404a651d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.086592] env[62510]: DEBUG oslo_vmware.api [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 2008.086592] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52dcca52-039b-a11c-d71f-b63a944f4dba" [ 2008.086592] env[62510]: _type = "Task" [ 2008.086592] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2008.094064] env[62510]: DEBUG oslo_vmware.api [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52dcca52-039b-a11c-d71f-b63a944f4dba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2008.270374] env[62510]: DEBUG oslo_vmware.api [None req-cf1e827a-f712-4a8e-ae84-3f747b1bedfb tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769746, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132438} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2008.270596] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf1e827a-f712-4a8e-ae84-3f747b1bedfb tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2008.270812] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-cf1e827a-f712-4a8e-ae84-3f747b1bedfb tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2008.270957] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-cf1e827a-f712-4a8e-ae84-3f747b1bedfb tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2008.271152] env[62510]: INFO nova.compute.manager [None req-cf1e827a-f712-4a8e-ae84-3f747b1bedfb tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Took 1.21 seconds to destroy the instance on the hypervisor. [ 2008.271489] env[62510]: DEBUG oslo.service.loopingcall [None req-cf1e827a-f712-4a8e-ae84-3f747b1bedfb tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2008.271606] env[62510]: DEBUG nova.compute.manager [-] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2008.271667] env[62510]: DEBUG nova.network.neutron [-] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2008.521846] env[62510]: INFO nova.compute.resource_tracker [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Updating resource usage from migration cd18239d-4bf8-4b7c-9fc5-a02ec9c2ec54 [ 2008.565453] env[62510]: DEBUG oslo_concurrency.lockutils [req-bd77f1e5-135c-47a7-9e37-af2faf67ea69 req-41c88679-629d-4e78-8783-0a4d846a3e24 service nova] Releasing lock "refresh_cache-31a181cd-b7cd-42c0-960d-e7d28987dc19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2008.601609] env[62510]: DEBUG oslo_vmware.api [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52dcca52-039b-a11c-d71f-b63a944f4dba, 'name': SearchDatastore_Task, 'duration_secs': 0.019275} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2008.605019] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58088da0-9643-4844-9337-d54ccd67e667 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.610507] env[62510]: DEBUG oslo_vmware.api [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 2008.610507] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5212d199-2ddb-7e05-7e17-3a6f539a4d45" [ 2008.610507] env[62510]: _type = "Task" [ 2008.610507] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2008.620766] env[62510]: DEBUG oslo_vmware.api [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5212d199-2ddb-7e05-7e17-3a6f539a4d45, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2008.704918] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c72ba919-2999-4940-9c76-05427b2c6d1a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.714781] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-305c8f94-5699-407f-94d5-05c0a62834e6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.747991] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86fe1128-4a1c-4166-9ebe-c75f016ebdc4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.755180] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eb99806-4b8d-45d8-a094-ccd6af3723ee {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.770095] env[62510]: DEBUG nova.compute.provider_tree [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2008.831503] env[62510]: DEBUG nova.network.neutron [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Port 453df25e-58eb-42b3-aa0a-3771b21d6b25 binding to destination host cpu-1 is already ACTIVE {{(pid=62510) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2009.021009] env[62510]: DEBUG nova.network.neutron [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Updating instance_info_cache with network_info: [{"id": "cc8e6d9b-23a8-4a82-bce2-858b46a9cf25", "address": "fa:16:3e:28:36:9e", "network": {"id": "4c55d05c-607e-4972-898f-4aacefeddfdb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1391357384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bae4f0adee8c4c28add1849316448538", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dced2f3d-7fd3-4a42-836d-9f02dab4c949", "external-id": "nsx-vlan-transportzone-117", "segmentation_id": 117, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc8e6d9b-23", "ovs_interfaceid": "cc8e6d9b-23a8-4a82-bce2-858b46a9cf25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": 
{}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2009.120697] env[62510]: DEBUG oslo_vmware.api [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5212d199-2ddb-7e05-7e17-3a6f539a4d45, 'name': SearchDatastore_Task, 'duration_secs': 0.010893} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2009.121189] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2009.121293] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 31a181cd-b7cd-42c0-960d-e7d28987dc19/31a181cd-b7cd-42c0-960d-e7d28987dc19.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2009.121466] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-64f31785-378e-4326-a226-f671a1018b57 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.128766] env[62510]: DEBUG oslo_vmware.api [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 2009.128766] env[62510]: value = "task-1769747" [ 2009.128766] env[62510]: _type = "Task" [ 2009.128766] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2009.135816] env[62510]: DEBUG oslo_vmware.api [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769747, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2009.202962] env[62510]: DEBUG nova.network.neutron [-] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2009.276279] env[62510]: DEBUG nova.scheduler.client.report [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2009.402484] env[62510]: DEBUG nova.compute.manager [req-f13900b2-95af-4e0f-ae72-0fb21dd6b007 req-0b50605b-e41c-4116-b73a-6c69dfe3b88f service nova] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Received event network-vif-deleted-68c246e2-7126-4f5b-bc52-3c63f14aacf5 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2009.524175] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Releasing lock "refresh_cache-22002fc1-647e-4e65-a5f0-c3a34575985f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2009.638659] env[62510]: DEBUG oslo_vmware.api [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769747, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.50107} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2009.639039] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 31a181cd-b7cd-42c0-960d-e7d28987dc19/31a181cd-b7cd-42c0-960d-e7d28987dc19.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2009.639269] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2009.639535] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-047a8ccc-3cff-4011-a607-bd5e2fc566c5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.647998] env[62510]: DEBUG oslo_vmware.api [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 2009.647998] env[62510]: value = "task-1769748" [ 2009.647998] env[62510]: _type = "Task" [ 2009.647998] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2009.656135] env[62510]: DEBUG oslo_vmware.api [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769748, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2009.704890] env[62510]: INFO nova.compute.manager [-] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Took 1.43 seconds to deallocate network for instance. 
[ 2009.779647] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.273s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2009.779864] env[62510]: INFO nova.compute.manager [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Migrating [ 2009.852955] env[62510]: DEBUG oslo_concurrency.lockutils [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "f39e74c3-eb58-4d28-a489-73d2de1e9bef-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2009.853203] env[62510]: DEBUG oslo_concurrency.lockutils [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "f39e74c3-eb58-4d28-a489-73d2de1e9bef-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2009.853377] env[62510]: DEBUG oslo_concurrency.lockutils [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "f39e74c3-eb58-4d28-a489-73d2de1e9bef-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2010.157810] env[62510]: DEBUG oslo_vmware.api [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769748, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060762} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2010.158119] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2010.158955] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3717256-6ef0-43f4-8b48-e2543c29341d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.181278] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Reconfiguring VM instance instance-00000078 to attach disk [datastore1] 31a181cd-b7cd-42c0-960d-e7d28987dc19/31a181cd-b7cd-42c0-960d-e7d28987dc19.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2010.181442] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-741449f2-7772-4147-a14d-5e24f537aa98 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.201672] env[62510]: DEBUG oslo_vmware.api [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 2010.201672] env[62510]: value = "task-1769749" [ 2010.201672] env[62510]: _type = "Task" [ 2010.201672] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2010.210548] env[62510]: DEBUG oslo_vmware.api [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769749, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2010.212657] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cf1e827a-f712-4a8e-ae84-3f747b1bedfb tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2010.212849] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cf1e827a-f712-4a8e-ae84-3f747b1bedfb tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2010.213274] env[62510]: DEBUG nova.objects.instance [None req-cf1e827a-f712-4a8e-ae84-3f747b1bedfb tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lazy-loading 'resources' on Instance uuid 92cb4e54-a00e-4974-b134-22d302932e32 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2010.296867] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "refresh_cache-f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2010.296867] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquired lock "refresh_cache-f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2010.296867] env[62510]: DEBUG nova.network.neutron [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2010.712197] env[62510]: DEBUG oslo_vmware.api [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769749, 'name': ReconfigVM_Task, 'duration_secs': 0.276465} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2010.712557] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Reconfigured VM instance instance-00000078 to attach disk [datastore1] 31a181cd-b7cd-42c0-960d-e7d28987dc19/31a181cd-b7cd-42c0-960d-e7d28987dc19.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2010.713243] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5e6975d6-aeea-4919-addb-4e79caf513a8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.723251] env[62510]: DEBUG oslo_vmware.api [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 2010.723251] env[62510]: value = "task-1769750" [ 2010.723251] env[62510]: _type = "Task" [ 2010.723251] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2010.732242] env[62510]: DEBUG oslo_vmware.api [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769750, 'name': Rename_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2010.863224] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29ef1451-4efc-4910-9ae5-7d4ca3b457d9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.871034] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db765dd0-e902-42da-b57a-69ccd7858ffb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.904041] env[62510]: DEBUG oslo_concurrency.lockutils [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "refresh_cache-f39e74c3-eb58-4d28-a489-73d2de1e9bef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2010.904267] env[62510]: DEBUG oslo_concurrency.lockutils [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquired lock "refresh_cache-f39e74c3-eb58-4d28-a489-73d2de1e9bef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2010.904443] env[62510]: DEBUG nova.network.neutron [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2010.908116] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-b8a6a4ae-f703-450e-b1e5-a63c710d4232 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.915988] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8e59c36-265f-4534-a232-10497f1e597b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.930296] env[62510]: DEBUG nova.compute.provider_tree [None req-cf1e827a-f712-4a8e-ae84-3f747b1bedfb tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2011.039441] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6a52551-6f74-434b-bb8b-35b2a15e0e32 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.060051] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Updating instance '22002fc1-647e-4e65-a5f0-c3a34575985f' progress to 0 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2011.064154] env[62510]: DEBUG nova.network.neutron [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Updating instance_info_cache with network_info: [{"id": "5550cd5d-e9b6-4414-a8e4-e7c6875d2399", "address": "fa:16:3e:c1:31:c9", "network": {"id": "e49618de-aacc-4b42-8a2e-7e2dc945a3b1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-883053645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b13a257970e4a9a9f9cfecaaf37d9da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5550cd5d-e9", "ovs_interfaceid": "5550cd5d-e9b6-4414-a8e4-e7c6875d2399", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2011.233967] env[62510]: DEBUG oslo_vmware.api [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769750, 'name': Rename_Task, 'duration_secs': 0.155392} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2011.234280] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2011.234719] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-82b6c0ea-4f05-4ae9-9df8-bd242e41b5fa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.241224] env[62510]: DEBUG oslo_vmware.api [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 2011.241224] env[62510]: value = "task-1769751" [ 2011.241224] env[62510]: _type = "Task" [ 2011.241224] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2011.248837] env[62510]: DEBUG oslo_vmware.api [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769751, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2011.433666] env[62510]: DEBUG nova.scheduler.client.report [None req-cf1e827a-f712-4a8e-ae84-3f747b1bedfb tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2011.567524] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2011.567829] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ccfcf96e-e5eb-40fb-94ea-37aec346f0da {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.569994] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Releasing lock "refresh_cache-f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2011.579948] env[62510]: DEBUG oslo_vmware.api [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 
2011.579948] env[62510]: value = "task-1769752" [ 2011.579948] env[62510]: _type = "Task" [ 2011.579948] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2011.595721] env[62510]: DEBUG oslo_vmware.api [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769752, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2011.674937] env[62510]: DEBUG nova.network.neutron [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Updating instance_info_cache with network_info: [{"id": "453df25e-58eb-42b3-aa0a-3771b21d6b25", "address": "fa:16:3e:5b:92:84", "network": {"id": "9b209a99-520e-436f-be97-fe37ae505518", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1482163995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86abf24d608d4c438161dc0b8335dea1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap453df25e-58", "ovs_interfaceid": "453df25e-58eb-42b3-aa0a-3771b21d6b25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2011.753492] env[62510]: DEBUG oslo_vmware.api [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769751, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2011.939078] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cf1e827a-f712-4a8e-ae84-3f747b1bedfb tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.726s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2011.963784] env[62510]: INFO nova.scheduler.client.report [None req-cf1e827a-f712-4a8e-ae84-3f747b1bedfb tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Deleted allocations for instance 92cb4e54-a00e-4974-b134-22d302932e32 [ 2012.089685] env[62510]: DEBUG oslo_vmware.api [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769752, 'name': PowerOffVM_Task, 'duration_secs': 0.226879} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2012.089685] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2012.089685] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Updating instance '22002fc1-647e-4e65-a5f0-c3a34575985f' progress to 17 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2012.178025] env[62510]: DEBUG oslo_concurrency.lockutils [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Releasing lock "refresh_cache-f39e74c3-eb58-4d28-a489-73d2de1e9bef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2012.251565] env[62510]: DEBUG oslo_vmware.api [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769751, 'name': PowerOnVM_Task, 'duration_secs': 0.669011} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2012.251833] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2012.252058] env[62510]: INFO nova.compute.manager [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Took 7.20 seconds to spawn the instance on the hypervisor. 
[ 2012.252239] env[62510]: DEBUG nova.compute.manager [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2012.253051] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da9b7983-5895-43d8-b1d8-4372be723ca3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.472593] env[62510]: DEBUG oslo_concurrency.lockutils [None req-cf1e827a-f712-4a8e-ae84-3f747b1bedfb tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lock "92cb4e54-a00e-4974-b134-22d302932e32" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.916s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2012.596861] env[62510]: DEBUG nova.virt.hardware [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:41Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2012.597130] env[62510]: DEBUG nova.virt.hardware [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2012.597333] env[62510]: DEBUG nova.virt.hardware [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2012.597478] env[62510]: DEBUG nova.virt.hardware [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2012.597624] env[62510]: DEBUG nova.virt.hardware [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2012.597777] env[62510]: DEBUG nova.virt.hardware [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2012.597978] env[62510]: DEBUG nova.virt.hardware [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2012.598154] env[62510]: DEBUG nova.virt.hardware [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2012.598320] env[62510]: DEBUG nova.virt.hardware [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2012.598486] env[62510]: DEBUG nova.virt.hardware [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2012.598695] env[62510]: DEBUG nova.virt.hardware [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2012.605807] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-42c9a3d3-267e-45ee-af61-fe9e3ba53442 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.625593] env[62510]: DEBUG oslo_vmware.api [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 2012.625593] env[62510]: value = "task-1769753" [ 2012.625593] env[62510]: _type = "Task" [ 2012.625593] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2012.634618] env[62510]: DEBUG oslo_vmware.api [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769753, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2012.687401] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3b438f2-b0a9-4401-8dc3-8ee971ea89f0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.694395] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24120f93-9345-402e-bddf-65473e1fc8e2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.770815] env[62510]: INFO nova.compute.manager [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Took 13.64 seconds to build instance. [ 2012.987955] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Acquiring lock "f9dc3ae0-a004-4baf-a972-e4480774cc3f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2012.988215] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Lock "f9dc3ae0-a004-4baf-a972-e4480774cc3f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2013.084405] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccd117ba-95a4-4231-bce0-2075b1d2d9f9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.103770] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Updating instance 'f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095' progress to 0 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2013.135447] env[62510]: DEBUG oslo_vmware.api [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769753, 'name': ReconfigVM_Task, 'duration_secs': 0.327746} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2013.135753] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Updating instance '22002fc1-647e-4e65-a5f0-c3a34575985f' progress to 33 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2013.272193] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4a0c9699-014a-4a19-8bed-b9f54f3daa6d tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "31a181cd-b7cd-42c0-960d-e7d28987dc19" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.145s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2013.490288] env[62510]: DEBUG nova.compute.manager [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2013.610206] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2013.610574] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-658ee03c-54cb-4805-8f4d-6b4db89d7c0c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.618196] env[62510]: DEBUG oslo_vmware.api [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2013.618196] env[62510]: value = "task-1769754" [ 2013.618196] env[62510]: _type = "Task" [ 2013.618196] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2013.628411] env[62510]: DEBUG oslo_vmware.api [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769754, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2013.645640] env[62510]: DEBUG nova.virt.hardware [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2013.645640] env[62510]: DEBUG nova.virt.hardware [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2013.645640] env[62510]: DEBUG nova.virt.hardware [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2013.645640] env[62510]: DEBUG nova.virt.hardware [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2013.645640] env[62510]: DEBUG nova.virt.hardware [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2013.645640] env[62510]: DEBUG nova.virt.hardware [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2013.645640] env[62510]: DEBUG nova.virt.hardware [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2013.645640] env[62510]: DEBUG nova.virt.hardware [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2013.645640] env[62510]: DEBUG nova.virt.hardware [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 
tempest-ServerActionsTestOtherB-1185268283-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2013.646053] env[62510]: DEBUG nova.virt.hardware [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2013.646053] env[62510]: DEBUG nova.virt.hardware [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2013.663034] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Reconfiguring VM instance instance-00000072 to detach disk 2000 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2013.663967] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-51ddfc5b-6ce5-4af5-9bf5-1fd0ab89c61a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.693666] env[62510]: DEBUG oslo_vmware.api [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 2013.693666] env[62510]: value = "task-1769755" [ 2013.693666] env[62510]: _type = "Task" [ 2013.693666] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2013.700112] env[62510]: DEBUG oslo_vmware.api [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769755, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2013.809959] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45391f7c-7834-414e-afdc-b67436d9180b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.831252] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f4ec9ca-12db-4088-a5f3-1b605b1d7cc4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.837232] env[62510]: DEBUG nova.compute.manager [req-ab93b875-b0f5-4a79-a4da-5bdeb9a619f6 req-039788f1-6af4-4bf1-8a76-7ffaa752dacf service nova] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Received event network-changed-6ae6f8a4-f91b-4f3f-b94e-a75ba935075c {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2013.837232] env[62510]: DEBUG nova.compute.manager [req-ab93b875-b0f5-4a79-a4da-5bdeb9a619f6 req-039788f1-6af4-4bf1-8a76-7ffaa752dacf service nova] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Refreshing instance network info cache due to event network-changed-6ae6f8a4-f91b-4f3f-b94e-a75ba935075c. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 2013.837232] env[62510]: DEBUG oslo_concurrency.lockutils [req-ab93b875-b0f5-4a79-a4da-5bdeb9a619f6 req-039788f1-6af4-4bf1-8a76-7ffaa752dacf service nova] Acquiring lock "refresh_cache-31a181cd-b7cd-42c0-960d-e7d28987dc19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2013.837232] env[62510]: DEBUG oslo_concurrency.lockutils [req-ab93b875-b0f5-4a79-a4da-5bdeb9a619f6 req-039788f1-6af4-4bf1-8a76-7ffaa752dacf service nova] Acquired lock "refresh_cache-31a181cd-b7cd-42c0-960d-e7d28987dc19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2013.837232] env[62510]: DEBUG nova.network.neutron [req-ab93b875-b0f5-4a79-a4da-5bdeb9a619f6 req-039788f1-6af4-4bf1-8a76-7ffaa752dacf service nova] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Refreshing network info cache for port 6ae6f8a4-f91b-4f3f-b94e-a75ba935075c {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2013.844638] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Updating instance 'f39e74c3-eb58-4d28-a489-73d2de1e9bef' progress to 83 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2014.011107] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2014.011395] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2014.013255] env[62510]: INFO nova.compute.claims [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2014.128015] env[62510]: DEBUG oslo_vmware.api [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769754, 'name': PowerOffVM_Task, 'duration_secs': 0.190002} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2014.128299] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2014.128483] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Updating instance 'f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095' progress to 17 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2014.201468] env[62510]: DEBUG oslo_vmware.api [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769755, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2014.353302] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2014.353302] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d6159d22-ac24-4c38-8f37-43c9384e8a1b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.360816] env[62510]: DEBUG oslo_vmware.api [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 2014.360816] env[62510]: value = "task-1769756" [ 2014.360816] env[62510]: _type = "Task" [ 2014.360816] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2014.369795] env[62510]: DEBUG oslo_vmware.api [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769756, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2014.612426] env[62510]: DEBUG nova.network.neutron [req-ab93b875-b0f5-4a79-a4da-5bdeb9a619f6 req-039788f1-6af4-4bf1-8a76-7ffaa752dacf service nova] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Updated VIF entry in instance network info cache for port 6ae6f8a4-f91b-4f3f-b94e-a75ba935075c. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2014.612842] env[62510]: DEBUG nova.network.neutron [req-ab93b875-b0f5-4a79-a4da-5bdeb9a619f6 req-039788f1-6af4-4bf1-8a76-7ffaa752dacf service nova] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Updating instance_info_cache with network_info: [{"id": "6ae6f8a4-f91b-4f3f-b94e-a75ba935075c", "address": "fa:16:3e:34:8d:a7", "network": {"id": "e420cc26-6a46-4189-b24c-78c39b6b4d50", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-234097015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11c021c6b45c452f83732fe578e576f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6eb7e3e9-5cc2-40f1-a6eb-f70f06531667", "external-id": "nsx-vlan-transportzone-938", "segmentation_id": 938, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ae6f8a4-f9", "ovs_interfaceid": "6ae6f8a4-f91b-4f3f-b94e-a75ba935075c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2014.635718] env[62510]: DEBUG nova.virt.hardware [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:41Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2014.636056] env[62510]: DEBUG nova.virt.hardware [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2014.636291] env[62510]: DEBUG nova.virt.hardware [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Image limits 0:0:0 {{(pid=62510) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2014.636562] env[62510]: DEBUG nova.virt.hardware [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2014.636714] env[62510]: DEBUG nova.virt.hardware [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2014.636836] env[62510]: DEBUG nova.virt.hardware [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2014.637075] env[62510]: DEBUG nova.virt.hardware [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2014.637261] env[62510]: DEBUG nova.virt.hardware [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2014.637442] env[62510]: DEBUG nova.virt.hardware [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2014.637630] env[62510]: DEBUG nova.virt.hardware [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2014.637811] env[62510]: DEBUG nova.virt.hardware [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2014.643705] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6bc7d3c4-4a5b-4970-b6a8-4ddb16e9dcfe {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.660581] env[62510]: DEBUG oslo_vmware.api [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2014.660581] env[62510]: value = "task-1769757" [ 2014.660581] env[62510]: _type = "Task" [ 2014.660581] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2014.669209] env[62510]: DEBUG oslo_vmware.api [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769757, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2014.702532] env[62510]: DEBUG oslo_vmware.api [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769755, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2014.872124] env[62510]: DEBUG oslo_vmware.api [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769756, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2015.115085] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Acquiring lock "94dd7eee-f799-4fb5-854b-e7d59621b125" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2015.115317] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lock "94dd7eee-f799-4fb5-854b-e7d59621b125" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2015.116587] env[62510]: DEBUG oslo_concurrency.lockutils [req-ab93b875-b0f5-4a79-a4da-5bdeb9a619f6 req-039788f1-6af4-4bf1-8a76-7ffaa752dacf service nova] Releasing lock "refresh_cache-31a181cd-b7cd-42c0-960d-e7d28987dc19" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2015.163044] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ba7d257-01e5-412f-b8a1-9b8b874fd70f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.176609] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed4db16c-275e-4fe0-9f99-ebc25d634acb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.179666] env[62510]: DEBUG oslo_vmware.api [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769757, 'name': ReconfigVM_Task, 'duration_secs': 0.275821} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2015.179923] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Updating instance 'f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095' progress to 33 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2015.215106] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d44acf8-f802-484f-9556-7cc33a381964 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.222793] env[62510]: DEBUG oslo_vmware.api [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769755, 'name': ReconfigVM_Task, 'duration_secs': 1.20984} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2015.225458] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Reconfigured VM instance instance-00000072 to detach disk 2000 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2015.226255] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2704378e-6da8-4535-adbe-6c404ae3bc59 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.229490] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3462007f-1dc3-4b73-8531-e7607fee0474 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.243573] env[62510]: DEBUG nova.compute.provider_tree [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2015.265133] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] 22002fc1-647e-4e65-a5f0-c3a34575985f/22002fc1-647e-4e65-a5f0-c3a34575985f.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2015.266149] env[62510]: DEBUG nova.scheduler.client.report [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2015.269242] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d0bbacf4-b1fd-474d-a29f-8d4b313039e3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.283049] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.272s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2015.283588] env[62510]: DEBUG nova.compute.manager [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2015.293497] env[62510]: DEBUG oslo_vmware.api [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 2015.293497] env[62510]: value = "task-1769758" [ 2015.293497] env[62510]: _type = "Task" [ 2015.293497] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2015.302228] env[62510]: DEBUG oslo_vmware.api [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769758, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2015.371697] env[62510]: DEBUG oslo_vmware.api [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769756, 'name': PowerOnVM_Task, 'duration_secs': 0.870365} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2015.371911] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2015.372113] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-de402a13-fc9c-45cd-bf10-c7f75df40f98 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Updating instance 'f39e74c3-eb58-4d28-a489-73d2de1e9bef' progress to 100 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2015.620238] env[62510]: DEBUG nova.compute.manager [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2015.686290] env[62510]: DEBUG nova.virt.hardware [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2015.686531] env[62510]: DEBUG nova.virt.hardware [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2015.686673] env[62510]: DEBUG nova.virt.hardware [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2015.686857] env[62510]: DEBUG nova.virt.hardware [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2015.687016] env[62510]: DEBUG nova.virt.hardware [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2015.687171] env[62510]: DEBUG nova.virt.hardware [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Chose 
sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2015.687380] env[62510]: DEBUG nova.virt.hardware [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2015.687537] env[62510]: DEBUG nova.virt.hardware [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2015.687699] env[62510]: DEBUG nova.virt.hardware [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2015.687882] env[62510]: DEBUG nova.virt.hardware [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2015.688073] env[62510]: DEBUG nova.virt.hardware [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2015.693361] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Reconfiguring VM instance instance-0000003b to detach disk 2000 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2015.693863] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7af03711-2fb6-4817-9c30-e5eb5ebcc2e3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.712867] env[62510]: DEBUG oslo_vmware.api [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2015.712867] env[62510]: value = "task-1769759" [ 2015.712867] env[62510]: _type = "Task" [ 2015.712867] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2015.720523] env[62510]: DEBUG oslo_vmware.api [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769759, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2015.789604] env[62510]: DEBUG nova.compute.utils [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2015.791115] env[62510]: DEBUG nova.compute.manager [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2015.791297] env[62510]: DEBUG nova.network.neutron [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2015.802663] env[62510]: DEBUG oslo_vmware.api [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769758, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2015.840010] env[62510]: DEBUG nova.policy [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6e31386240444f2c84e5671a5a3e4634', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bc0190059df4469d8487f3e1fbfd05dc', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 2016.142343] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2016.142639] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2016.144648] env[62510]: INFO nova.compute.claims [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2016.223673] env[62510]: DEBUG oslo_vmware.api [None req-ea239b80-2841-49fb-9551-67495600e8f5 
tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769759, 'name': ReconfigVM_Task, 'duration_secs': 0.179859} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2016.224484] env[62510]: DEBUG nova.network.neutron [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Successfully created port: 5dc06f69-e8a7-42e6-beb5-dc159884a1fd {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2016.226238] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Reconfigured VM instance instance-0000003b to detach disk 2000 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2016.227068] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bf708a9-f814-4382-bb06-d91a6bb937f6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.249269] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095/f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2016.249521] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aac9f6ca-4050-43cf-900d-08afb335c7d8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.267477] env[62510]: DEBUG oslo_vmware.api [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2016.267477] env[62510]: value = "task-1769760" [ 2016.267477] env[62510]: _type = "Task" [ 2016.267477] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2016.276457] env[62510]: DEBUG oslo_vmware.api [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769760, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2016.294193] env[62510]: DEBUG nova.compute.manager [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Start building block device mappings for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2016.306488] env[62510]: DEBUG oslo_vmware.api [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769758, 'name': ReconfigVM_Task, 'duration_secs': 0.782621} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2016.306488] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Reconfigured VM instance instance-00000072 to attach disk [datastore1] 22002fc1-647e-4e65-a5f0-c3a34575985f/22002fc1-647e-4e65-a5f0-c3a34575985f.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2016.306488] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Updating instance '22002fc1-647e-4e65-a5f0-c3a34575985f' progress to 50 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2016.777929] env[62510]: DEBUG oslo_vmware.api [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769760, 'name': ReconfigVM_Task, 'duration_secs': 0.260225} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2016.777929] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Reconfigured VM instance instance-0000003b to attach disk [datastore1] f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095/f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2016.778180] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Updating instance 'f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095' progress to 50 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2016.812602] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a90aa012-65fe-4cc6-a0c3-833b926a8090 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.836133] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-285c6aee-9e02-4c37-af4c-4d7a53502961 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.857628] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 
22002fc1-647e-4e65-a5f0-c3a34575985f] Updating instance '22002fc1-647e-4e65-a5f0-c3a34575985f' progress to 67 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2017.289140] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16d11662-70b3-45aa-8073-c758c1683dc7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.292904] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c29b9a13-b5d7-412d-ad25-cb5fcd8d3db0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.320046] env[62510]: DEBUG nova.compute.manager [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2017.322987] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f696e319-06a1-4ea0-ab89-88ce53754672 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.326394] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a34b269c-3962-43b1-bb16-901bab7e5fcd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.345788] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Updating instance 'f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095' progress to 67 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2017.382843] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6ac7370-cbfb-4559-8a9c-86a2cc0e7b94 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.390360] env[62510]: DEBUG nova.virt.hardware [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2017.390608] env[62510]: DEBUG nova.virt.hardware [None 
req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2017.390763] env[62510]: DEBUG nova.virt.hardware [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2017.390944] env[62510]: DEBUG nova.virt.hardware [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2017.391103] env[62510]: DEBUG nova.virt.hardware [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2017.391250] env[62510]: DEBUG nova.virt.hardware [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2017.391452] env[62510]: DEBUG nova.virt.hardware [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2017.391610] env[62510]: DEBUG nova.virt.hardware [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2017.391773] env[62510]: DEBUG nova.virt.hardware [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2017.391932] env[62510]: DEBUG nova.virt.hardware [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2017.392125] env[62510]: DEBUG nova.virt.hardware [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2017.393122] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c5fabebc-7c07-4e65-96f6-00ace9856082 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.399644] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f3aefa7-2fc8-4ccc-8f99-8c99ecef2df5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.406545] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e964d93a-a1c5-4c5d-abaa-c7f2b5ae8319 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.421220] env[62510]: DEBUG nova.compute.provider_tree [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2017.621919] env[62510]: DEBUG nova.compute.manager [req-04150237-29c4-4b34-bb1c-defe61b40890 req-68a9a655-b779-4531-904e-aff382913cb9 service nova] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Received event network-vif-plugged-5dc06f69-e8a7-42e6-beb5-dc159884a1fd {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2017.622199] env[62510]: DEBUG oslo_concurrency.lockutils [req-04150237-29c4-4b34-bb1c-defe61b40890 req-68a9a655-b779-4531-904e-aff382913cb9 service nova] Acquiring lock "f9dc3ae0-a004-4baf-a972-e4480774cc3f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2017.622410] env[62510]: DEBUG oslo_concurrency.lockutils [req-04150237-29c4-4b34-bb1c-defe61b40890 req-68a9a655-b779-4531-904e-aff382913cb9 service nova] Lock "f9dc3ae0-a004-4baf-a972-e4480774cc3f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2017.622580] env[62510]: DEBUG oslo_concurrency.lockutils [req-04150237-29c4-4b34-bb1c-defe61b40890 req-68a9a655-b779-4531-904e-aff382913cb9 service nova] Lock "f9dc3ae0-a004-4baf-a972-e4480774cc3f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2017.622781] env[62510]: DEBUG nova.compute.manager [req-04150237-29c4-4b34-bb1c-defe61b40890 req-68a9a655-b779-4531-904e-aff382913cb9 service nova] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] No waiting events found dispatching network-vif-plugged-5dc06f69-e8a7-42e6-beb5-dc159884a1fd {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2017.622945] env[62510]: WARNING nova.compute.manager [req-04150237-29c4-4b34-bb1c-defe61b40890 req-68a9a655-b779-4531-904e-aff382913cb9 service nova] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Received unexpected event network-vif-plugged-5dc06f69-e8a7-42e6-beb5-dc159884a1fd for instance with vm_state building and task_state spawning. 
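[editor's note] The ReconfigVM_Task and CreateVM_Task entries above all share one shape: the driver kicks off a vCenter task, then polls it, logging "progress is N%." until the poll reports completion (the wait_for_task / _poll_task frames in oslo_vmware/api.py). As a rough illustration only — FakeTask, its poll() method, and this wait_for_task helper are hypothetical stand-ins for the pattern, not the oslo.vmware API or Nova's code — a poll-until-terminal loop of that shape could look like:

    # Minimal sketch, assuming a task handle that can report (state, progress).
    import time


    class FakeTask:
        """Hypothetical stand-in for a vCenter task handle (not a real vSphere object)."""

        def __init__(self, task_id, steps=3):
            self.task_id = task_id
            self._progress = 0
            self._steps = steps

        def poll(self):
            """Return (state, progress); advances a little on every call."""
            self._progress = min(100, self._progress + 100 // self._steps)
            state = "success" if self._progress >= 100 else "running"
            return state, self._progress


    def wait_for_task(task, poll_interval=0.5, timeout=30.0):
        """Poll `task` until it succeeds, fails, or the timeout expires."""
        deadline = time.monotonic() + timeout
        while True:
            state, progress = task.poll()
            # Mirrors the "Task: {'id': ...} progress is N%." lines in the log above.
            print(f"Task: {{'id': {task.task_id!r}}} progress is {progress}%.")
            if state == "success":
                return progress
            if state == "error":
                raise RuntimeError(f"task {task.task_id} failed")
            if time.monotonic() > deadline:
                raise TimeoutError(f"task {task.task_id} did not complete in {timeout}s")
            time.sleep(poll_interval)


    if __name__ == "__main__":
        wait_for_task(FakeTask("task-1769759"))
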
[ 2017.735722] env[62510]: DEBUG nova.network.neutron [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Successfully updated port: 5dc06f69-e8a7-42e6-beb5-dc159884a1fd {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2017.797468] env[62510]: DEBUG oslo_concurrency.lockutils [None req-549dc243-92df-4a1f-bb0d-ef7081f8677a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "f39e74c3-eb58-4d28-a489-73d2de1e9bef" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2017.799057] env[62510]: DEBUG oslo_concurrency.lockutils [None req-549dc243-92df-4a1f-bb0d-ef7081f8677a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "f39e74c3-eb58-4d28-a489-73d2de1e9bef" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2017.799057] env[62510]: DEBUG nova.compute.manager [None req-549dc243-92df-4a1f-bb0d-ef7081f8677a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Going to confirm migration 7 {{(pid=62510) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5157}} [ 2017.915167] env[62510]: DEBUG nova.network.neutron [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Port 5550cd5d-e9b6-4414-a8e4-e7c6875d2399 binding to destination host cpu-1 is already ACTIVE {{(pid=62510) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2017.923808] env[62510]: DEBUG nova.scheduler.client.report [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2018.241167] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Acquiring lock "refresh_cache-f9dc3ae0-a004-4baf-a972-e4480774cc3f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2018.241333] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Acquired lock "refresh_cache-f9dc3ae0-a004-4baf-a972-e4480774cc3f" {{(pid=62510) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2018.241491] env[62510]: DEBUG nova.network.neutron [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2018.345494] env[62510]: DEBUG oslo_concurrency.lockutils [None req-549dc243-92df-4a1f-bb0d-ef7081f8677a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "refresh_cache-f39e74c3-eb58-4d28-a489-73d2de1e9bef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2018.345746] env[62510]: DEBUG oslo_concurrency.lockutils [None req-549dc243-92df-4a1f-bb0d-ef7081f8677a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquired lock "refresh_cache-f39e74c3-eb58-4d28-a489-73d2de1e9bef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2018.345881] env[62510]: DEBUG nova.network.neutron [None req-549dc243-92df-4a1f-bb0d-ef7081f8677a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2018.346082] env[62510]: DEBUG nova.objects.instance [None req-549dc243-92df-4a1f-bb0d-ef7081f8677a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lazy-loading 'info_cache' on Instance uuid f39e74c3-eb58-4d28-a489-73d2de1e9bef {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2018.428305] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.286s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2018.428881] env[62510]: DEBUG nova.compute.manager [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2018.517423] env[62510]: DEBUG nova.network.neutron [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Port cc8e6d9b-23a8-4a82-bce2-858b46a9cf25 binding to destination host cpu-1 is already ACTIVE {{(pid=62510) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2018.774217] env[62510]: DEBUG nova.network.neutron [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Instance cache missing network info. 
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2018.931979] env[62510]: DEBUG nova.network.neutron [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Updating instance_info_cache with network_info: [{"id": "5dc06f69-e8a7-42e6-beb5-dc159884a1fd", "address": "fa:16:3e:60:30:a8", "network": {"id": "ebbeee87-e946-483c-a3dc-c38e3ff8a9ba", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1070827522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "bc0190059df4469d8487f3e1fbfd05dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "245efab9-c420-438e-a0b8-906357ef62c1", "external-id": "nsx-vlan-transportzone-959", "segmentation_id": 959, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5dc06f69-e8", "ovs_interfaceid": "5dc06f69-e8a7-42e6-beb5-dc159884a1fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2018.934676] env[62510]: DEBUG nova.compute.utils [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2018.947495] env[62510]: DEBUG nova.compute.manager [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2018.947495] env[62510]: DEBUG nova.network.neutron [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2018.958782] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2018.959116] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2018.959393] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2018.961742] env[62510]: DEBUG nova.compute.manager [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Start building block device mappings for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2019.005536] env[62510]: DEBUG nova.policy [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0ac183a237d24bc2a644cc3ba05d7f1b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '59300e0f20144d9f88b78f7c971e86c9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 2019.262845] env[62510]: DEBUG nova.network.neutron [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Successfully created port: 12cbde53-7f97-41bf-818c-04b6c994d690 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2019.438508] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Releasing lock "refresh_cache-f9dc3ae0-a004-4baf-a972-e4480774cc3f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2019.438934] env[62510]: DEBUG nova.compute.manager [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Instance network_info: |[{"id": "5dc06f69-e8a7-42e6-beb5-dc159884a1fd", "address": "fa:16:3e:60:30:a8", "network": {"id": "ebbeee87-e946-483c-a3dc-c38e3ff8a9ba", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1070827522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "bc0190059df4469d8487f3e1fbfd05dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "245efab9-c420-438e-a0b8-906357ef62c1", "external-id": "nsx-vlan-transportzone-959", "segmentation_id": 959, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5dc06f69-e8", "ovs_interfaceid": "5dc06f69-e8a7-42e6-beb5-dc159884a1fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2019.439417] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:60:30:a8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'245efab9-c420-438e-a0b8-906357ef62c1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5dc06f69-e8a7-42e6-beb5-dc159884a1fd', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2019.448665] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Creating folder: Project (bc0190059df4469d8487f3e1fbfd05dc). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2019.448974] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-64c7610a-376a-4961-acfe-b6ca973ea3bd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.462157] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Created folder: Project (bc0190059df4469d8487f3e1fbfd05dc) in parent group-v367197. [ 2019.462349] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Creating folder: Instances. Parent ref: group-v367509. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2019.462795] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-627c0455-979e-429c-9929-3d0195a51fb3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.484113] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Created folder: Instances in parent group-v367509. [ 2019.484365] env[62510]: DEBUG oslo.service.loopingcall [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2019.484562] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2019.484774] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e9f7adc1-d334-4cbd-a7c9-30a20be1a27e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.507308] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2019.507308] env[62510]: value = "task-1769763" [ 2019.507308] env[62510]: _type = "Task" [ 2019.507308] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2019.516316] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769763, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2019.543981] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "22002fc1-647e-4e65-a5f0-c3a34575985f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2019.544538] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "22002fc1-647e-4e65-a5f0-c3a34575985f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2019.545029] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "22002fc1-647e-4e65-a5f0-c3a34575985f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2019.632872] env[62510]: DEBUG nova.network.neutron [None req-549dc243-92df-4a1f-bb0d-ef7081f8677a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Updating instance_info_cache with network_info: [{"id": "453df25e-58eb-42b3-aa0a-3771b21d6b25", "address": "fa:16:3e:5b:92:84", "network": {"id": "9b209a99-520e-436f-be97-fe37ae505518", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1482163995-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86abf24d608d4c438161dc0b8335dea1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9630cae2-7dd9-42b7-8b53-91ab254af243", "external-id": "nsx-vlan-transportzone-900", "segmentation_id": 900, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap453df25e-58", "ovs_interfaceid": "453df25e-58eb-42b3-aa0a-3771b21d6b25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2019.650543] env[62510]: DEBUG nova.compute.manager [req-711a25fd-2209-487e-97e6-c0ddb501854f req-ce52956a-864d-4df7-835d-5cca29ffafab service nova] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Received event network-changed-5dc06f69-e8a7-42e6-beb5-dc159884a1fd {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2019.650836] env[62510]: DEBUG 
nova.compute.manager [req-711a25fd-2209-487e-97e6-c0ddb501854f req-ce52956a-864d-4df7-835d-5cca29ffafab service nova] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Refreshing instance network info cache due to event network-changed-5dc06f69-e8a7-42e6-beb5-dc159884a1fd. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 2019.651092] env[62510]: DEBUG oslo_concurrency.lockutils [req-711a25fd-2209-487e-97e6-c0ddb501854f req-ce52956a-864d-4df7-835d-5cca29ffafab service nova] Acquiring lock "refresh_cache-f9dc3ae0-a004-4baf-a972-e4480774cc3f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2019.651244] env[62510]: DEBUG oslo_concurrency.lockutils [req-711a25fd-2209-487e-97e6-c0ddb501854f req-ce52956a-864d-4df7-835d-5cca29ffafab service nova] Acquired lock "refresh_cache-f9dc3ae0-a004-4baf-a972-e4480774cc3f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2019.651408] env[62510]: DEBUG nova.network.neutron [req-711a25fd-2209-487e-97e6-c0ddb501854f req-ce52956a-864d-4df7-835d-5cca29ffafab service nova] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Refreshing network info cache for port 5dc06f69-e8a7-42e6-beb5-dc159884a1fd {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2019.978040] env[62510]: DEBUG nova.compute.manager [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2019.994809] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "refresh_cache-f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2019.995019] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquired lock "refresh_cache-f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2019.995183] env[62510]: DEBUG nova.network.neutron [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2020.004505] env[62510]: DEBUG nova.virt.hardware [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2020.004689] env[62510]: DEBUG nova.virt.hardware [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2020.004844] env[62510]: DEBUG nova.virt.hardware [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2020.005036] env[62510]: DEBUG nova.virt.hardware [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2020.005188] env[62510]: DEBUG nova.virt.hardware [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2020.005334] env[62510]: DEBUG nova.virt.hardware [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2020.005632] env[62510]: DEBUG nova.virt.hardware [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2020.005799] env[62510]: DEBUG nova.virt.hardware [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2020.005965] env[62510]: DEBUG nova.virt.hardware [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2020.006143] env[62510]: DEBUG nova.virt.hardware [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2020.006320] env[62510]: DEBUG nova.virt.hardware [None 
req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2020.007224] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e806ab94-b123-40aa-b368-7049aa51a517 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.021342] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e24a4b99-068e-48cd-88ac-d7b0c6a22252 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.026671] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769763, 'name': CreateVM_Task, 'duration_secs': 0.354984} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2020.027060] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2020.027934] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2020.028102] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2020.028404] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2020.028676] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b74870a4-a901-4b12-8877-ad8cf39845ea {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.040531] env[62510]: DEBUG oslo_vmware.api [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Waiting for the task: (returnval){ [ 2020.040531] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]529137da-1740-16ff-4b46-a0b7b6555686" [ 2020.040531] env[62510]: _type = "Task" [ 2020.040531] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2020.047468] env[62510]: DEBUG oslo_vmware.api [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]529137da-1740-16ff-4b46-a0b7b6555686, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2020.136112] env[62510]: DEBUG oslo_concurrency.lockutils [None req-549dc243-92df-4a1f-bb0d-ef7081f8677a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Releasing lock "refresh_cache-f39e74c3-eb58-4d28-a489-73d2de1e9bef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2020.136373] env[62510]: DEBUG nova.objects.instance [None req-549dc243-92df-4a1f-bb0d-ef7081f8677a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lazy-loading 'migration_context' on Instance uuid f39e74c3-eb58-4d28-a489-73d2de1e9bef {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2020.323416] env[62510]: DEBUG nova.network.neutron [req-711a25fd-2209-487e-97e6-c0ddb501854f req-ce52956a-864d-4df7-835d-5cca29ffafab service nova] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Updated VIF entry in instance network info cache for port 5dc06f69-e8a7-42e6-beb5-dc159884a1fd. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2020.323781] env[62510]: DEBUG nova.network.neutron [req-711a25fd-2209-487e-97e6-c0ddb501854f req-ce52956a-864d-4df7-835d-5cca29ffafab service nova] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Updating instance_info_cache with network_info: [{"id": "5dc06f69-e8a7-42e6-beb5-dc159884a1fd", "address": "fa:16:3e:60:30:a8", "network": {"id": "ebbeee87-e946-483c-a3dc-c38e3ff8a9ba", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1070827522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "bc0190059df4469d8487f3e1fbfd05dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "245efab9-c420-438e-a0b8-906357ef62c1", "external-id": "nsx-vlan-transportzone-959", "segmentation_id": 959, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5dc06f69-e8", "ovs_interfaceid": "5dc06f69-e8a7-42e6-beb5-dc159884a1fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2020.553672] env[62510]: DEBUG oslo_vmware.api [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]529137da-1740-16ff-4b46-a0b7b6555686, 'name': SearchDatastore_Task, 'duration_secs': 0.009179} completed 
successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2020.553988] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2020.554131] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2020.554360] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2020.554509] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2020.554721] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2020.555300] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-19d8feb4-0bcc-42e1-a0ae-9b308e631b4a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.568798] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2020.569064] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2020.569792] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f1387722-3bbf-4367-8dac-75b9b96eb55d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.575272] env[62510]: DEBUG oslo_vmware.api [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Waiting for the task: (returnval){ [ 2020.575272] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5282fc5a-0019-f280-69e6-f7d7b26ed09b" [ 2020.575272] env[62510]: _type = "Task" [ 2020.575272] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2020.582354] env[62510]: DEBUG oslo_vmware.api [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5282fc5a-0019-f280-69e6-f7d7b26ed09b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2020.601039] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "refresh_cache-22002fc1-647e-4e65-a5f0-c3a34575985f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2020.601209] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquired lock "refresh_cache-22002fc1-647e-4e65-a5f0-c3a34575985f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2020.601384] env[62510]: DEBUG nova.network.neutron [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2020.640038] env[62510]: DEBUG nova.objects.base [None req-549dc243-92df-4a1f-bb0d-ef7081f8677a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=62510) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2020.643021] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-912eb98b-e04f-4d9e-b645-a850b71dbff2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.663421] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02e3f8ec-7f61-424e-9547-1bfb99f5986b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.669842] env[62510]: DEBUG oslo_vmware.api [None req-549dc243-92df-4a1f-bb0d-ef7081f8677a tempest-ServerActionsTestOtherA-606697029 
tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 2020.669842] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52c07cc7-23c0-c5eb-6bd5-07f71ca22389" [ 2020.669842] env[62510]: _type = "Task" [ 2020.669842] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2020.677976] env[62510]: DEBUG oslo_vmware.api [None req-549dc243-92df-4a1f-bb0d-ef7081f8677a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52c07cc7-23c0-c5eb-6bd5-07f71ca22389, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2020.732234] env[62510]: DEBUG nova.network.neutron [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Updating instance_info_cache with network_info: [{"id": "5550cd5d-e9b6-4414-a8e4-e7c6875d2399", "address": "fa:16:3e:c1:31:c9", "network": {"id": "e49618de-aacc-4b42-8a2e-7e2dc945a3b1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-883053645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b13a257970e4a9a9f9cfecaaf37d9da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5550cd5d-e9", "ovs_interfaceid": "5550cd5d-e9b6-4414-a8e4-e7c6875d2399", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2020.826133] env[62510]: DEBUG oslo_concurrency.lockutils [req-711a25fd-2209-487e-97e6-c0ddb501854f req-ce52956a-864d-4df7-835d-5cca29ffafab service nova] Releasing lock "refresh_cache-f9dc3ae0-a004-4baf-a972-e4480774cc3f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2021.080788] env[62510]: DEBUG nova.network.neutron [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Successfully updated port: 12cbde53-7f97-41bf-818c-04b6c994d690 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2021.088733] env[62510]: DEBUG oslo_vmware.api [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5282fc5a-0019-f280-69e6-f7d7b26ed09b, 'name': SearchDatastore_Task, 'duration_secs': 0.043996} 
completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2021.089578] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-253373ce-e1ab-47c5-8f44-4391f4a7fa4e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.095859] env[62510]: DEBUG oslo_vmware.api [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Waiting for the task: (returnval){ [ 2021.095859] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5253bf51-0dfd-3291-8aa4-077442340b3a" [ 2021.095859] env[62510]: _type = "Task" [ 2021.095859] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2021.105911] env[62510]: DEBUG oslo_vmware.api [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5253bf51-0dfd-3291-8aa4-077442340b3a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.181417] env[62510]: DEBUG oslo_vmware.api [None req-549dc243-92df-4a1f-bb0d-ef7081f8677a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52c07cc7-23c0-c5eb-6bd5-07f71ca22389, 'name': SearchDatastore_Task, 'duration_secs': 0.01883} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2021.181682] env[62510]: DEBUG oslo_concurrency.lockutils [None req-549dc243-92df-4a1f-bb0d-ef7081f8677a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2021.181905] env[62510]: DEBUG oslo_concurrency.lockutils [None req-549dc243-92df-4a1f-bb0d-ef7081f8677a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2021.234623] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Releasing lock "refresh_cache-f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2021.330332] env[62510]: DEBUG nova.network.neutron [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Updating instance_info_cache with network_info: [{"id": "cc8e6d9b-23a8-4a82-bce2-858b46a9cf25", "address": "fa:16:3e:28:36:9e", "network": {"id": "4c55d05c-607e-4972-898f-4aacefeddfdb", 
"bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1391357384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bae4f0adee8c4c28add1849316448538", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dced2f3d-7fd3-4a42-836d-9f02dab4c949", "external-id": "nsx-vlan-transportzone-117", "segmentation_id": 117, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc8e6d9b-23", "ovs_interfaceid": "cc8e6d9b-23a8-4a82-bce2-858b46a9cf25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2021.583688] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Acquiring lock "refresh_cache-94dd7eee-f799-4fb5-854b-e7d59621b125" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2021.584043] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Acquired lock "refresh_cache-94dd7eee-f799-4fb5-854b-e7d59621b125" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2021.584112] env[62510]: DEBUG nova.network.neutron [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2021.609826] env[62510]: DEBUG oslo_vmware.api [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5253bf51-0dfd-3291-8aa4-077442340b3a, 'name': SearchDatastore_Task, 'duration_secs': 0.010541} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2021.610213] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2021.610593] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] f9dc3ae0-a004-4baf-a972-e4480774cc3f/f9dc3ae0-a004-4baf-a972-e4480774cc3f.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2021.610976] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ee79e472-e982-4395-a05c-985ef3111771 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.619296] env[62510]: DEBUG oslo_vmware.api [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Waiting for the task: (returnval){ [ 2021.619296] env[62510]: value = "task-1769764" [ 2021.619296] env[62510]: _type = "Task" [ 2021.619296] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2021.630403] env[62510]: DEBUG oslo_vmware.api [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': task-1769764, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.681625] env[62510]: DEBUG nova.compute.manager [req-0e3874bd-e724-4b03-a127-abf17df62c8c req-90279e28-8291-44b7-8f2e-97c448c0aa2c service nova] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Received event network-vif-plugged-12cbde53-7f97-41bf-818c-04b6c994d690 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2021.681845] env[62510]: DEBUG oslo_concurrency.lockutils [req-0e3874bd-e724-4b03-a127-abf17df62c8c req-90279e28-8291-44b7-8f2e-97c448c0aa2c service nova] Acquiring lock "94dd7eee-f799-4fb5-854b-e7d59621b125-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2021.682070] env[62510]: DEBUG oslo_concurrency.lockutils [req-0e3874bd-e724-4b03-a127-abf17df62c8c req-90279e28-8291-44b7-8f2e-97c448c0aa2c service nova] Lock "94dd7eee-f799-4fb5-854b-e7d59621b125-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2021.682224] env[62510]: DEBUG oslo_concurrency.lockutils [req-0e3874bd-e724-4b03-a127-abf17df62c8c req-90279e28-8291-44b7-8f2e-97c448c0aa2c service nova] Lock "94dd7eee-f799-4fb5-854b-e7d59621b125-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2021.682389] env[62510]: DEBUG nova.compute.manager [req-0e3874bd-e724-4b03-a127-abf17df62c8c req-90279e28-8291-44b7-8f2e-97c448c0aa2c service nova] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] No waiting events found dispatching network-vif-plugged-12cbde53-7f97-41bf-818c-04b6c994d690 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2021.682547] env[62510]: WARNING nova.compute.manager [req-0e3874bd-e724-4b03-a127-abf17df62c8c req-90279e28-8291-44b7-8f2e-97c448c0aa2c service nova] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Received unexpected event network-vif-plugged-12cbde53-7f97-41bf-818c-04b6c994d690 for instance with vm_state building and task_state spawning. [ 2021.682707] env[62510]: DEBUG nova.compute.manager [req-0e3874bd-e724-4b03-a127-abf17df62c8c req-90279e28-8291-44b7-8f2e-97c448c0aa2c service nova] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Received event network-changed-12cbde53-7f97-41bf-818c-04b6c994d690 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2021.682859] env[62510]: DEBUG nova.compute.manager [req-0e3874bd-e724-4b03-a127-abf17df62c8c req-90279e28-8291-44b7-8f2e-97c448c0aa2c service nova] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Refreshing instance network info cache due to event network-changed-12cbde53-7f97-41bf-818c-04b6c994d690. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 2021.683030] env[62510]: DEBUG oslo_concurrency.lockutils [req-0e3874bd-e724-4b03-a127-abf17df62c8c req-90279e28-8291-44b7-8f2e-97c448c0aa2c service nova] Acquiring lock "refresh_cache-94dd7eee-f799-4fb5-854b-e7d59621b125" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2021.755467] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c43d1e3d-64a4-4fcf-84bc-9c15baad5bad {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.776171] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53fb53c9-76d6-4aee-886b-3ce74ccc987b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.783442] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Updating instance 'f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095' progress to 83 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2021.835685] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Releasing lock "refresh_cache-22002fc1-647e-4e65-a5f0-c3a34575985f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2021.865644] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbdc241a-8afc-4e64-bf28-76d504d8f93b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.874491] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a05d17dd-a07e-47b5-8a9d-5f216ad2c012 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.909750] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7eba012-b566-487d-80f7-428c2d94b92f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.918895] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3d6594d-ee8c-4707-b5c0-84db3ad69ad1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.934847] env[62510]: DEBUG nova.compute.provider_tree [None req-549dc243-92df-4a1f-bb0d-ef7081f8677a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2022.129162] env[62510]: DEBUG oslo_vmware.api [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': task-1769764, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.136192] env[62510]: DEBUG nova.network.neutron [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2022.290114] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2022.290418] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c512f6d0-affc-416b-ab0f-75863ba62379 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.298780] env[62510]: DEBUG oslo_vmware.api [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2022.298780] env[62510]: value = "task-1769765" [ 2022.298780] env[62510]: _type = "Task" [ 2022.298780] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2022.306785] env[62510]: DEBUG oslo_vmware.api [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769765, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.323316] env[62510]: DEBUG nova.network.neutron [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Updating instance_info_cache with network_info: [{"id": "12cbde53-7f97-41bf-818c-04b6c994d690", "address": "fa:16:3e:75:92:c1", "network": {"id": "2193bc16-0e54-4910-9194-2724652b0e5d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1870939634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "59300e0f20144d9f88b78f7c971e86c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbdab640-5fea-4254-8bd3-f855b7eaca0d", "external-id": "nsx-vlan-transportzone-615", "segmentation_id": 615, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12cbde53-7f", "ovs_interfaceid": "12cbde53-7f97-41bf-818c-04b6c994d690", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2022.345186] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fc1eec9-e4d7-4dba-ac4c-154d46ebf483 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.352265] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41c78907-1c47-4582-84a2-c68536a80b21 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.437825] env[62510]: DEBUG nova.scheduler.client.report [None req-549dc243-92df-4a1f-bb0d-ef7081f8677a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2022.631381] env[62510]: DEBUG oslo_vmware.api [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': task-1769764, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.872441} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2022.631731] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] f9dc3ae0-a004-4baf-a972-e4480774cc3f/f9dc3ae0-a004-4baf-a972-e4480774cc3f.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2022.631930] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2022.632158] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7c8248aa-ae1c-43cb-acd7-778e346dc84b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.639444] env[62510]: DEBUG oslo_vmware.api [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Waiting for the task: (returnval){ [ 2022.639444] env[62510]: value = "task-1769766" [ 2022.639444] env[62510]: _type = "Task" [ 2022.639444] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2022.648431] env[62510]: DEBUG oslo_vmware.api [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': task-1769766, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.810357] env[62510]: DEBUG oslo_vmware.api [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769765, 'name': PowerOnVM_Task, 'duration_secs': 0.503209} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2022.810639] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2022.810788] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ea239b80-2841-49fb-9551-67495600e8f5 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Updating instance 'f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095' progress to 100 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2022.826038] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Releasing lock "refresh_cache-94dd7eee-f799-4fb5-854b-e7d59621b125" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2022.826278] env[62510]: DEBUG nova.compute.manager [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Instance network_info: |[{"id": "12cbde53-7f97-41bf-818c-04b6c994d690", "address": "fa:16:3e:75:92:c1", "network": {"id": "2193bc16-0e54-4910-9194-2724652b0e5d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1870939634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "59300e0f20144d9f88b78f7c971e86c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbdab640-5fea-4254-8bd3-f855b7eaca0d", "external-id": "nsx-vlan-transportzone-615", "segmentation_id": 615, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12cbde53-7f", "ovs_interfaceid": "12cbde53-7f97-41bf-818c-04b6c994d690", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2022.826546] env[62510]: DEBUG oslo_concurrency.lockutils [req-0e3874bd-e724-4b03-a127-abf17df62c8c req-90279e28-8291-44b7-8f2e-97c448c0aa2c service nova] Acquired lock "refresh_cache-94dd7eee-f799-4fb5-854b-e7d59621b125" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2022.826722] env[62510]: DEBUG nova.network.neutron [req-0e3874bd-e724-4b03-a127-abf17df62c8c req-90279e28-8291-44b7-8f2e-97c448c0aa2c service nova] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Refreshing network info cache for port 12cbde53-7f97-41bf-818c-04b6c994d690 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2022.827749] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None 
req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:75:92:c1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dbdab640-5fea-4254-8bd3-f855b7eaca0d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '12cbde53-7f97-41bf-818c-04b6c994d690', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2022.835169] env[62510]: DEBUG oslo.service.loopingcall [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2022.838078] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2022.838642] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-606dcbdd-e9af-4afe-a0cb-9c8483d1a24d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.858905] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2022.858905] env[62510]: value = "task-1769767" [ 2022.858905] env[62510]: _type = "Task" [ 2022.858905] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2022.867249] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769767, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2023.134473] env[62510]: DEBUG nova.network.neutron [req-0e3874bd-e724-4b03-a127-abf17df62c8c req-90279e28-8291-44b7-8f2e-97c448c0aa2c service nova] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Updated VIF entry in instance network info cache for port 12cbde53-7f97-41bf-818c-04b6c994d690. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2023.134859] env[62510]: DEBUG nova.network.neutron [req-0e3874bd-e724-4b03-a127-abf17df62c8c req-90279e28-8291-44b7-8f2e-97c448c0aa2c service nova] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Updating instance_info_cache with network_info: [{"id": "12cbde53-7f97-41bf-818c-04b6c994d690", "address": "fa:16:3e:75:92:c1", "network": {"id": "2193bc16-0e54-4910-9194-2724652b0e5d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1870939634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "59300e0f20144d9f88b78f7c971e86c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbdab640-5fea-4254-8bd3-f855b7eaca0d", "external-id": "nsx-vlan-transportzone-615", "segmentation_id": 615, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12cbde53-7f", "ovs_interfaceid": "12cbde53-7f97-41bf-818c-04b6c994d690", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2023.150386] env[62510]: DEBUG oslo_vmware.api [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': task-1769766, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080274} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2023.150638] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2023.151406] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e92e02d-ebf0-4285-a219-85a8a0ea49df {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.173521] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] f9dc3ae0-a004-4baf-a972-e4480774cc3f/f9dc3ae0-a004-4baf-a972-e4480774cc3f.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2023.173804] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2b896539-0a6c-4c06-8744-e51581b5d820 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.193372] env[62510]: DEBUG oslo_vmware.api [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Waiting for the task: (returnval){ [ 2023.193372] env[62510]: value = "task-1769768" [ 2023.193372] env[62510]: _type = "Task" [ 2023.193372] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2023.201500] env[62510]: DEBUG oslo_vmware.api [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': task-1769768, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2023.369433] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769767, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2023.448186] env[62510]: DEBUG oslo_concurrency.lockutils [None req-549dc243-92df-4a1f-bb0d-ef7081f8677a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.266s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2023.457394] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-964ea628-9a79-42d6-ba1c-c2db72de8096 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.479532] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-325f45b2-ecb6-4ded-96ce-a001f1b78f7e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.486846] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Updating instance '22002fc1-647e-4e65-a5f0-c3a34575985f' progress to 83 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2023.637596] env[62510]: DEBUG oslo_concurrency.lockutils [req-0e3874bd-e724-4b03-a127-abf17df62c8c req-90279e28-8291-44b7-8f2e-97c448c0aa2c service nova] Releasing lock "refresh_cache-94dd7eee-f799-4fb5-854b-e7d59621b125" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2023.704662] env[62510]: DEBUG oslo_vmware.api [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': task-1769768, 'name': ReconfigVM_Task, 'duration_secs': 0.358291} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2023.704936] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Reconfigured VM instance instance-00000079 to attach disk [datastore1] f9dc3ae0-a004-4baf-a972-e4480774cc3f/f9dc3ae0-a004-4baf-a972-e4480774cc3f.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2023.705587] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3a384840-0042-4a3d-95ba-bfcd18642b84 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.712916] env[62510]: DEBUG oslo_vmware.api [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Waiting for the task: (returnval){ [ 2023.712916] env[62510]: value = "task-1769769" [ 2023.712916] env[62510]: _type = "Task" [ 2023.712916] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2023.721706] env[62510]: DEBUG oslo_vmware.api [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': task-1769769, 'name': Rename_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2023.868925] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769767, 'name': CreateVM_Task, 'duration_secs': 0.719394} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2023.869223] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2023.869775] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2023.869970] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2023.870303] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2023.871230] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64648854-9674-456e-b6f8-3aed35bfaf73 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.876065] env[62510]: DEBUG oslo_vmware.api [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Waiting for the task: (returnval){ [ 2023.876065] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52061593-f68d-c10c-0773-7dac2d1ee91b" [ 2023.876065] env[62510]: _type = "Task" [ 2023.876065] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2023.883221] env[62510]: DEBUG oslo_vmware.api [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52061593-f68d-c10c-0773-7dac2d1ee91b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2023.993420] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2023.993729] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e4d541c1-bf30-485d-b2f3-904256059ae7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.001252] env[62510]: DEBUG oslo_vmware.api [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 2024.001252] env[62510]: value = "task-1769770" [ 2024.001252] env[62510]: _type = "Task" [ 2024.001252] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2024.002164] env[62510]: INFO nova.scheduler.client.report [None req-549dc243-92df-4a1f-bb0d-ef7081f8677a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Deleted allocation for migration eb277a30-dc24-4d19-8bd9-02104d08ca83 [ 2024.014165] env[62510]: DEBUG oslo_vmware.api [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769770, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2024.223422] env[62510]: DEBUG oslo_vmware.api [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': task-1769769, 'name': Rename_Task, 'duration_secs': 0.146312} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2024.223662] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2024.223907] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-adcc681f-4c18-4630-9243-3d8480a850a2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.230478] env[62510]: DEBUG oslo_vmware.api [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Waiting for the task: (returnval){ [ 2024.230478] env[62510]: value = "task-1769771" [ 2024.230478] env[62510]: _type = "Task" [ 2024.230478] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2024.237693] env[62510]: DEBUG oslo_vmware.api [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': task-1769771, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2024.386184] env[62510]: DEBUG oslo_vmware.api [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52061593-f68d-c10c-0773-7dac2d1ee91b, 'name': SearchDatastore_Task, 'duration_secs': 0.009438} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2024.386469] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2024.386717] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2024.386976] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2024.387101] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2024.387277] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2024.387529] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8707f517-32cc-47e5-88f0-50f3ab1018c6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.413082] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2024.413273] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2024.413989] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-832ea99e-f8a6-41d7-bf5b-25fb67871a31 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.420008] env[62510]: DEBUG oslo_vmware.api [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Waiting for the task: (returnval){ [ 2024.420008] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52a1ac9e-3036-c102-5385-259012357123" [ 2024.420008] env[62510]: _type = "Task" [ 2024.420008] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2024.429067] env[62510]: DEBUG oslo_vmware.api [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52a1ac9e-3036-c102-5385-259012357123, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2024.515030] env[62510]: DEBUG oslo_concurrency.lockutils [None req-549dc243-92df-4a1f-bb0d-ef7081f8677a tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "f39e74c3-eb58-4d28-a489-73d2de1e9bef" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.716s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2024.515030] env[62510]: DEBUG oslo_vmware.api [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769770, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2024.537298] env[62510]: INFO nova.compute.manager [None req-57b59464-2756-4657-8f7d-67b166b3269e tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Get console output [ 2024.537476] env[62510]: WARNING nova.virt.vmwareapi.driver [None req-57b59464-2756-4657-8f7d-67b166b3269e tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] The console log is missing. Check your VSPC configuration [ 2024.740918] env[62510]: DEBUG oslo_vmware.api [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': task-1769771, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2024.809790] env[62510]: DEBUG oslo_concurrency.lockutils [None req-052fb69e-5f3c-42c1-9268-e252eff1e80a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2024.810095] env[62510]: DEBUG oslo_concurrency.lockutils [None req-052fb69e-5f3c-42c1-9268-e252eff1e80a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2024.810349] env[62510]: DEBUG nova.compute.manager [None req-052fb69e-5f3c-42c1-9268-e252eff1e80a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Going to confirm migration 9 {{(pid=62510) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5157}} [ 2024.932142] env[62510]: DEBUG oslo_vmware.api [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52a1ac9e-3036-c102-5385-259012357123, 'name': SearchDatastore_Task, 'duration_secs': 0.05337} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2024.932925] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e21baaee-8e6f-482c-962e-8f08d937302f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.938055] env[62510]: DEBUG oslo_vmware.api [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Waiting for the task: (returnval){ [ 2024.938055] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52246424-8d22-48da-bb46-34329c7b64ac" [ 2024.938055] env[62510]: _type = "Task" [ 2024.938055] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2024.946168] env[62510]: DEBUG oslo_vmware.api [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52246424-8d22-48da-bb46-34329c7b64ac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2025.014284] env[62510]: DEBUG oslo_vmware.api [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769770, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2025.241828] env[62510]: DEBUG oslo_vmware.api [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': task-1769771, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2025.394935] env[62510]: DEBUG oslo_concurrency.lockutils [None req-052fb69e-5f3c-42c1-9268-e252eff1e80a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "refresh_cache-f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2025.395156] env[62510]: DEBUG oslo_concurrency.lockutils [None req-052fb69e-5f3c-42c1-9268-e252eff1e80a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquired lock "refresh_cache-f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2025.395341] env[62510]: DEBUG nova.network.neutron [None req-052fb69e-5f3c-42c1-9268-e252eff1e80a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2025.395526] env[62510]: DEBUG nova.objects.instance [None req-052fb69e-5f3c-42c1-9268-e252eff1e80a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lazy-loading 'info_cache' on Instance uuid f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2025.448432] env[62510]: DEBUG oslo_vmware.api [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52246424-8d22-48da-bb46-34329c7b64ac, 'name': SearchDatastore_Task, 'duration_secs': 0.011968} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2025.448703] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2025.448957] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 94dd7eee-f799-4fb5-854b-e7d59621b125/94dd7eee-f799-4fb5-854b-e7d59621b125.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2025.449223] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e02ffce9-9e87-4156-b350-caed3835ba89 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.455803] env[62510]: DEBUG oslo_vmware.api [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Waiting for the task: (returnval){ [ 2025.455803] env[62510]: value = "task-1769772" [ 2025.455803] env[62510]: _type = "Task" [ 2025.455803] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2025.463410] env[62510]: DEBUG oslo_vmware.api [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769772, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2025.513802] env[62510]: DEBUG oslo_vmware.api [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769770, 'name': PowerOnVM_Task, 'duration_secs': 1.264398} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2025.514083] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2025.514273] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ea16c156-6c4b-4ca4-b2db-f3d8bac45961 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Updating instance '22002fc1-647e-4e65-a5f0-c3a34575985f' progress to 100 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2025.744554] env[62510]: DEBUG oslo_vmware.api [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': task-1769771, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2025.965654] env[62510]: DEBUG oslo_vmware.api [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769772, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2026.244320] env[62510]: DEBUG oslo_vmware.api [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': task-1769771, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2026.466768] env[62510]: DEBUG oslo_vmware.api [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769772, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.972453} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2026.467040] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 94dd7eee-f799-4fb5-854b-e7d59621b125/94dd7eee-f799-4fb5-854b-e7d59621b125.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2026.467260] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2026.467506] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cf8818bf-8336-4564-928b-d012d6e17013 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.474613] env[62510]: DEBUG oslo_vmware.api [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Waiting for the task: (returnval){ [ 2026.474613] env[62510]: value = "task-1769773" [ 2026.474613] env[62510]: _type = "Task" [ 2026.474613] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2026.482910] env[62510]: DEBUG oslo_vmware.api [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769773, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2026.714987] env[62510]: DEBUG nova.network.neutron [None req-052fb69e-5f3c-42c1-9268-e252eff1e80a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Updating instance_info_cache with network_info: [{"id": "5550cd5d-e9b6-4414-a8e4-e7c6875d2399", "address": "fa:16:3e:c1:31:c9", "network": {"id": "e49618de-aacc-4b42-8a2e-7e2dc945a3b1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-883053645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b13a257970e4a9a9f9cfecaaf37d9da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5550cd5d-e9", "ovs_interfaceid": "5550cd5d-e9b6-4414-a8e4-e7c6875d2399", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2026.743313] env[62510]: DEBUG oslo_vmware.api [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': task-1769771, 'name': PowerOnVM_Task, 'duration_secs': 2.049125} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2026.743570] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2026.743782] env[62510]: INFO nova.compute.manager [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Took 9.42 seconds to spawn the instance on the hypervisor. 
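The entries above keep exercising the same two mechanisms: per-instance work is serialized through oslo.concurrency locks (the "Acquiring lock ... acquired ... released" pairs from lockutils), and every vCenter operation (PowerOnVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ...) is started through the oslo.vmware session and then polled until the "progress is N% ... completed successfully" lines appear. The following is a minimal illustrative sketch of that pattern, not nova's actual implementation; the vCenter host, credentials, poll interval, and the power_on helper are placeholders and are not taken from this log.

from oslo_concurrency import lockutils
from oslo_vmware import api as vmware_api

# Placeholder endpoint and credentials; creating the session connects to
# vCenter and is a prerequisite for invoking any of the tasks seen above.
session = vmware_api.VMwareAPISession(
    'vc.example.test', 'svc-user', 'secret',
    api_retry_count=10,
    task_poll_interval=0.5)   # seconds between the "progress is N%" polls

# The lock name mirrors the per-instance UUID locks in the surrounding
# entries; the lockutils wrapper is what emits the "Acquiring lock ... by"
# / "acquired" / "released" lines.
@lockutils.synchronized('f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095')
def power_on(vm_ref):
    # Start the vCenter task, then block while oslo.vmware polls it
    # (the _poll_task / wait_for_task lines), returning the final task info.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    return session.wait_for_task(task)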
[ 2026.743955] env[62510]: DEBUG nova.compute.manager [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2026.744721] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fd9d207-7c31-43c3-82b7-ac99c6aa3477 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.984510] env[62510]: DEBUG oslo_vmware.api [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769773, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.157203} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2026.984772] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2026.985538] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07beb7ef-0eb4-489f-b170-c1f049b07fea {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.009924] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] 94dd7eee-f799-4fb5-854b-e7d59621b125/94dd7eee-f799-4fb5-854b-e7d59621b125.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2027.010252] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9e7197e2-0baf-45bd-bc52-afbbd0565b7f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.043299] env[62510]: DEBUG oslo_vmware.api [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Waiting for the task: (returnval){ [ 2027.043299] env[62510]: value = "task-1769774" [ 2027.043299] env[62510]: _type = "Task" [ 2027.043299] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2027.055294] env[62510]: DEBUG oslo_vmware.api [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769774, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2027.218034] env[62510]: DEBUG oslo_concurrency.lockutils [None req-052fb69e-5f3c-42c1-9268-e252eff1e80a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Releasing lock "refresh_cache-f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2027.218034] env[62510]: DEBUG nova.objects.instance [None req-052fb69e-5f3c-42c1-9268-e252eff1e80a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lazy-loading 'migration_context' on Instance uuid f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2027.259764] env[62510]: INFO nova.compute.manager [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Took 13.26 seconds to build instance. [ 2027.539589] env[62510]: INFO nova.compute.manager [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Rescuing [ 2027.539894] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Acquiring lock "refresh_cache-f9dc3ae0-a004-4baf-a972-e4480774cc3f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2027.540076] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Acquired lock "refresh_cache-f9dc3ae0-a004-4baf-a972-e4480774cc3f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2027.540249] env[62510]: DEBUG nova.network.neutron [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2027.553353] env[62510]: DEBUG oslo_vmware.api [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769774, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2027.720756] env[62510]: DEBUG nova.objects.base [None req-052fb69e-5f3c-42c1-9268-e252eff1e80a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=62510) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2027.721748] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2c7e726-4a7e-4d01-a411-58adf273a84e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.742146] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-379ca9b0-5703-47f0-99f2-fe87d31163ce {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.747751] env[62510]: DEBUG oslo_vmware.api [None req-052fb69e-5f3c-42c1-9268-e252eff1e80a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2027.747751] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5239b3e2-bd0b-a7a1-bdf7-0251de127119" [ 2027.747751] env[62510]: _type = "Task" [ 2027.747751] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2027.755669] env[62510]: DEBUG oslo_vmware.api [None req-052fb69e-5f3c-42c1-9268-e252eff1e80a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5239b3e2-bd0b-a7a1-bdf7-0251de127119, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2027.762197] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e3ab91dc-20d6-4523-96e1-f5ff243b18f8 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Lock "f9dc3ae0-a004-4baf-a972-e4480774cc3f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.774s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2028.054252] env[62510]: DEBUG oslo_vmware.api [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769774, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2028.258423] env[62510]: DEBUG oslo_vmware.api [None req-052fb69e-5f3c-42c1-9268-e252eff1e80a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5239b3e2-bd0b-a7a1-bdf7-0251de127119, 'name': SearchDatastore_Task, 'duration_secs': 0.020393} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2028.258785] env[62510]: DEBUG oslo_concurrency.lockutils [None req-052fb69e-5f3c-42c1-9268-e252eff1e80a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2028.259026] env[62510]: DEBUG oslo_concurrency.lockutils [None req-052fb69e-5f3c-42c1-9268-e252eff1e80a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2028.266873] env[62510]: DEBUG nova.network.neutron [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Updating instance_info_cache with network_info: [{"id": "5dc06f69-e8a7-42e6-beb5-dc159884a1fd", "address": "fa:16:3e:60:30:a8", "network": {"id": "ebbeee87-e946-483c-a3dc-c38e3ff8a9ba", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1070827522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "bc0190059df4469d8487f3e1fbfd05dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "245efab9-c420-438e-a0b8-906357ef62c1", "external-id": "nsx-vlan-transportzone-959", "segmentation_id": 959, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5dc06f69-e8", "ovs_interfaceid": "5dc06f69-e8a7-42e6-beb5-dc159884a1fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2028.341791] env[62510]: DEBUG nova.network.neutron [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Port cc8e6d9b-23a8-4a82-bce2-858b46a9cf25 binding to destination host cpu-1 is already ACTIVE {{(pid=62510) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2028.342069] env[62510]: DEBUG oslo_concurrency.lockutils [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "refresh_cache-22002fc1-647e-4e65-a5f0-c3a34575985f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2028.342225] env[62510]: DEBUG oslo_concurrency.lockutils [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquired 
lock "refresh_cache-22002fc1-647e-4e65-a5f0-c3a34575985f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2028.342386] env[62510]: DEBUG nova.network.neutron [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2028.554428] env[62510]: DEBUG oslo_vmware.api [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769774, 'name': ReconfigVM_Task, 'duration_secs': 1.101118} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2028.554695] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Reconfigured VM instance instance-0000007a to attach disk [datastore1] 94dd7eee-f799-4fb5-854b-e7d59621b125/94dd7eee-f799-4fb5-854b-e7d59621b125.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2028.555327] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4075a9c9-721a-4a9e-ba37-e287beff34fe {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.561103] env[62510]: DEBUG oslo_vmware.api [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Waiting for the task: (returnval){ [ 2028.561103] env[62510]: value = "task-1769775" [ 2028.561103] env[62510]: _type = "Task" [ 2028.561103] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2028.769219] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Releasing lock "refresh_cache-f9dc3ae0-a004-4baf-a972-e4480774cc3f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2028.901431] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5085a888-c857-4e9a-aebf-1c55882e880e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.909137] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b53c0bcc-d008-4f7b-9a3d-e7e6d1d735f0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.940280] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a734547-5816-4a6e-9d4c-807a442fb67f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.947673] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8204cc5a-ab23-4d8f-b272-7f8299695a89 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.961467] env[62510]: DEBUG nova.compute.provider_tree [None req-052fb69e-5f3c-42c1-9268-e252eff1e80a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2029.072599] env[62510]: DEBUG oslo_vmware.api [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769775, 'name': Rename_Task, 'duration_secs': 0.191336} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2029.072838] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2029.073187] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-63daaa3a-1bef-4f24-85eb-541124e9c859 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.079809] env[62510]: DEBUG oslo_vmware.api [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Waiting for the task: (returnval){ [ 2029.079809] env[62510]: value = "task-1769776" [ 2029.079809] env[62510]: _type = "Task" [ 2029.079809] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2029.087176] env[62510]: DEBUG oslo_vmware.api [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769776, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2029.182247] env[62510]: DEBUG nova.network.neutron [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Updating instance_info_cache with network_info: [{"id": "cc8e6d9b-23a8-4a82-bce2-858b46a9cf25", "address": "fa:16:3e:28:36:9e", "network": {"id": "4c55d05c-607e-4972-898f-4aacefeddfdb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1391357384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bae4f0adee8c4c28add1849316448538", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dced2f3d-7fd3-4a42-836d-9f02dab4c949", "external-id": "nsx-vlan-transportzone-117", "segmentation_id": 117, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc8e6d9b-23", "ovs_interfaceid": "cc8e6d9b-23a8-4a82-bce2-858b46a9cf25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2029.465033] env[62510]: DEBUG nova.scheduler.client.report [None req-052fb69e-5f3c-42c1-9268-e252eff1e80a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2029.589474] env[62510]: DEBUG oslo_vmware.api [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769776, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2029.684710] env[62510]: DEBUG oslo_concurrency.lockutils [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Releasing lock "refresh_cache-22002fc1-647e-4e65-a5f0-c3a34575985f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2030.090600] env[62510]: DEBUG oslo_vmware.api [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769776, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2030.188835] env[62510]: DEBUG nova.compute.manager [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62510) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 2030.306495] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2030.307522] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bed087e9-7cea-45db-93e9-3a1353d8fdca {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.314363] env[62510]: DEBUG oslo_vmware.api [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Waiting for the task: (returnval){ [ 2030.314363] env[62510]: value = "task-1769777" [ 2030.314363] env[62510]: _type = "Task" [ 2030.314363] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2030.322605] env[62510]: DEBUG oslo_vmware.api [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': task-1769777, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2030.475330] env[62510]: DEBUG oslo_concurrency.lockutils [None req-052fb69e-5f3c-42c1-9268-e252eff1e80a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.216s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2030.589875] env[62510]: DEBUG oslo_vmware.api [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769776, 'name': PowerOnVM_Task, 'duration_secs': 1.028384} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2030.590239] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2030.590449] env[62510]: INFO nova.compute.manager [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Took 10.61 seconds to spawn the instance on the hypervisor. [ 2030.590632] env[62510]: DEBUG nova.compute.manager [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2030.591398] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e1b21a2-80ee-45cf-bdf8-6ff52423f7b5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.825104] env[62510]: DEBUG oslo_vmware.api [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': task-1769777, 'name': PowerOffVM_Task, 'duration_secs': 0.249457} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2030.825359] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2030.826160] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4daa8d08-3b6f-463e-8988-389c584337e6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.845787] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53bd57b8-4ad7-43b7-a46d-7f02a19c0037 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.873512] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2030.873778] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f0e3c6f7-943e-4720-8627-2c4c30ff657d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.880564] env[62510]: DEBUG oslo_vmware.api [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] 
Waiting for the task: (returnval){ [ 2030.880564] env[62510]: value = "task-1769778" [ 2030.880564] env[62510]: _type = "Task" [ 2030.880564] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2030.887916] env[62510]: DEBUG oslo_vmware.api [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': task-1769778, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2031.038105] env[62510]: INFO nova.scheduler.client.report [None req-052fb69e-5f3c-42c1-9268-e252eff1e80a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Deleted allocation for migration cd18239d-4bf8-4b7c-9fc5-a02ec9c2ec54 [ 2031.109949] env[62510]: INFO nova.compute.manager [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Took 14.98 seconds to build instance. [ 2031.291393] env[62510]: DEBUG oslo_concurrency.lockutils [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2031.291451] env[62510]: DEBUG oslo_concurrency.lockutils [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2031.391744] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] VM already powered off {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2031.392024] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2031.392227] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2031.392369] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Acquired lock 
"[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2031.392546] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2031.392911] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-57766644-2a9a-46da-afb8-c1ae4c243aaa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.401534] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2031.401712] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2031.402470] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7a0bd90-2fec-4a30-a839-c991ab524132 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.407269] env[62510]: DEBUG oslo_vmware.api [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Waiting for the task: (returnval){ [ 2031.407269] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52dbddf8-5597-1386-290c-eb78e7d32724" [ 2031.407269] env[62510]: _type = "Task" [ 2031.407269] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2031.415272] env[62510]: DEBUG oslo_vmware.api [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52dbddf8-5597-1386-290c-eb78e7d32724, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2031.543185] env[62510]: DEBUG oslo_concurrency.lockutils [None req-052fb69e-5f3c-42c1-9268-e252eff1e80a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.733s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2031.612068] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c5475e31-de1a-4d9d-8641-45a0b06be887 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lock "94dd7eee-f799-4fb5-854b-e7d59621b125" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.496s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2031.794361] env[62510]: DEBUG nova.objects.instance [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lazy-loading 'migration_context' on Instance uuid 22002fc1-647e-4e65-a5f0-c3a34575985f {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2031.917033] env[62510]: DEBUG oslo_vmware.api [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52dbddf8-5597-1386-290c-eb78e7d32724, 'name': SearchDatastore_Task, 'duration_secs': 0.008548} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2031.917859] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6619515-71d1-4fcf-b242-b492d46c6542 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.924095] env[62510]: DEBUG oslo_vmware.api [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Waiting for the task: (returnval){ [ 2031.924095] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5241a3ff-1908-d8fd-d2e2-e549ce9a255d" [ 2031.924095] env[62510]: _type = "Task" [ 2031.924095] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2031.933311] env[62510]: DEBUG oslo_vmware.api [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5241a3ff-1908-d8fd-d2e2-e549ce9a255d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2031.948577] env[62510]: DEBUG nova.compute.manager [req-44c3ced4-5237-4c3e-9d35-2026a8737acd req-7359fc89-18d5-4256-88c8-9320633d6da9 service nova] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Received event network-changed-12cbde53-7f97-41bf-818c-04b6c994d690 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2031.948837] env[62510]: DEBUG nova.compute.manager [req-44c3ced4-5237-4c3e-9d35-2026a8737acd req-7359fc89-18d5-4256-88c8-9320633d6da9 service nova] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Refreshing instance network info cache due to event network-changed-12cbde53-7f97-41bf-818c-04b6c994d690. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 2031.949069] env[62510]: DEBUG oslo_concurrency.lockutils [req-44c3ced4-5237-4c3e-9d35-2026a8737acd req-7359fc89-18d5-4256-88c8-9320633d6da9 service nova] Acquiring lock "refresh_cache-94dd7eee-f799-4fb5-854b-e7d59621b125" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2031.949207] env[62510]: DEBUG oslo_concurrency.lockutils [req-44c3ced4-5237-4c3e-9d35-2026a8737acd req-7359fc89-18d5-4256-88c8-9320633d6da9 service nova] Acquired lock "refresh_cache-94dd7eee-f799-4fb5-854b-e7d59621b125" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2031.949372] env[62510]: DEBUG nova.network.neutron [req-44c3ced4-5237-4c3e-9d35-2026a8737acd req-7359fc89-18d5-4256-88c8-9320633d6da9 service nova] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Refreshing network info cache for port 12cbde53-7f97-41bf-818c-04b6c994d690 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2032.134134] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4e7ca7ea-a4b3-44c2-a2d2-c714bda31911 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2032.134134] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4e7ca7ea-a4b3-44c2-a2d2-c714bda31911 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2032.134134] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4e7ca7ea-a4b3-44c2-a2d2-c714bda31911 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2032.134134] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4e7ca7ea-a4b3-44c2-a2d2-c714bda31911 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2032.134134] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4e7ca7ea-a4b3-44c2-a2d2-c714bda31911 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2032.135276] env[62510]: INFO nova.compute.manager [None req-4e7ca7ea-a4b3-44c2-a2d2-c714bda31911 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Terminating instance [ 2032.411229] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-441a2005-5f1e-4c9b-b4b1-22adac161de5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.419307] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02799ae8-5542-4e14-a0ee-2148bfd3304f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.456146] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1d3949a-6917-4d46-895e-8b5b45fce6b7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.463733] env[62510]: DEBUG oslo_vmware.api [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5241a3ff-1908-d8fd-d2e2-e549ce9a255d, 'name': SearchDatastore_Task, 'duration_secs': 0.01068} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2032.463951] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2032.464224] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] f9dc3ae0-a004-4baf-a972-e4480774cc3f/645af513-c243-4722-b631-714f21477ae6-rescue.vmdk. 
{{(pid=62510) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 2032.464488] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b8df061b-7506-40ad-90be-e10bb2831dbb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.469972] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b39ff92-99a9-4691-afce-d1a7fa5daaaa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.474459] env[62510]: DEBUG oslo_vmware.api [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Waiting for the task: (returnval){ [ 2032.474459] env[62510]: value = "task-1769779" [ 2032.474459] env[62510]: _type = "Task" [ 2032.474459] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2032.485218] env[62510]: DEBUG nova.compute.provider_tree [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2032.491136] env[62510]: DEBUG oslo_vmware.api [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': task-1769779, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2032.639253] env[62510]: DEBUG nova.compute.manager [None req-4e7ca7ea-a4b3-44c2-a2d2-c714bda31911 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2032.639527] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4e7ca7ea-a4b3-44c2-a2d2-c714bda31911 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2032.640529] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14bb2fdf-3517-415d-b851-1285ea25e115 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.652609] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e7ca7ea-a4b3-44c2-a2d2-c714bda31911 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2032.652948] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ed3ef62d-fe83-4aca-8553-b9b581d680b5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.662444] env[62510]: DEBUG oslo_vmware.api [None req-4e7ca7ea-a4b3-44c2-a2d2-c714bda31911 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2032.662444] env[62510]: value = "task-1769780" [ 2032.662444] env[62510]: _type = "Task" [ 2032.662444] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2032.676739] env[62510]: DEBUG oslo_vmware.api [None req-4e7ca7ea-a4b3-44c2-a2d2-c714bda31911 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769780, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2032.731151] env[62510]: DEBUG nova.network.neutron [req-44c3ced4-5237-4c3e-9d35-2026a8737acd req-7359fc89-18d5-4256-88c8-9320633d6da9 service nova] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Updated VIF entry in instance network info cache for port 12cbde53-7f97-41bf-818c-04b6c994d690. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2032.731670] env[62510]: DEBUG nova.network.neutron [req-44c3ced4-5237-4c3e-9d35-2026a8737acd req-7359fc89-18d5-4256-88c8-9320633d6da9 service nova] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Updating instance_info_cache with network_info: [{"id": "12cbde53-7f97-41bf-818c-04b6c994d690", "address": "fa:16:3e:75:92:c1", "network": {"id": "2193bc16-0e54-4910-9194-2724652b0e5d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1870939634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "59300e0f20144d9f88b78f7c971e86c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbdab640-5fea-4254-8bd3-f855b7eaca0d", "external-id": "nsx-vlan-transportzone-615", "segmentation_id": 615, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12cbde53-7f", "ovs_interfaceid": "12cbde53-7f97-41bf-818c-04b6c994d690", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2032.986347] env[62510]: DEBUG oslo_vmware.api [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': task-1769779, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2032.988632] env[62510]: DEBUG nova.scheduler.client.report [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2033.172302] env[62510]: DEBUG oslo_vmware.api [None req-4e7ca7ea-a4b3-44c2-a2d2-c714bda31911 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769780, 'name': PowerOffVM_Task, 'duration_secs': 0.393802} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2033.172685] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e7ca7ea-a4b3-44c2-a2d2-c714bda31911 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2033.172749] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4e7ca7ea-a4b3-44c2-a2d2-c714bda31911 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2033.172981] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9f5d36ea-f599-4fe5-b251-6b041579f035 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.234457] env[62510]: DEBUG oslo_concurrency.lockutils [req-44c3ced4-5237-4c3e-9d35-2026a8737acd req-7359fc89-18d5-4256-88c8-9320633d6da9 service nova] Releasing lock "refresh_cache-94dd7eee-f799-4fb5-854b-e7d59621b125" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2033.330424] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4e7ca7ea-a4b3-44c2-a2d2-c714bda31911 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2033.330615] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4e7ca7ea-a4b3-44c2-a2d2-c714bda31911 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2033.330747] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e7ca7ea-a4b3-44c2-a2d2-c714bda31911 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Deleting the datastore file [datastore1] f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2033.331017] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-be150fa5-0e9d-473c-9d08-f3a619dcf2cd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.337243] env[62510]: DEBUG oslo_vmware.api [None req-4e7ca7ea-a4b3-44c2-a2d2-c714bda31911 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2033.337243] env[62510]: value = "task-1769782" [ 2033.337243] env[62510]: _type = "Task" [ 2033.337243] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2033.345016] env[62510]: DEBUG oslo_vmware.api [None req-4e7ca7ea-a4b3-44c2-a2d2-c714bda31911 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769782, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2033.485759] env[62510]: DEBUG oslo_vmware.api [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': task-1769779, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.526003} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2033.486091] env[62510]: INFO nova.virt.vmwareapi.ds_util [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] f9dc3ae0-a004-4baf-a972-e4480774cc3f/645af513-c243-4722-b631-714f21477ae6-rescue.vmdk. [ 2033.486937] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c7f917a-43b4-4c32-a2ec-87e29bfacbf1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.515687] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] f9dc3ae0-a004-4baf-a972-e4480774cc3f/645af513-c243-4722-b631-714f21477ae6-rescue.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2033.516179] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-171f1d36-db65-47b6-982d-be758def9226 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.537029] env[62510]: DEBUG oslo_vmware.api [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Waiting for the task: (returnval){ [ 2033.537029] env[62510]: value = "task-1769783" [ 2033.537029] env[62510]: _type = "Task" [ 2033.537029] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2033.545739] env[62510]: DEBUG oslo_vmware.api [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': task-1769783, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2033.847703] env[62510]: DEBUG oslo_vmware.api [None req-4e7ca7ea-a4b3-44c2-a2d2-c714bda31911 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769782, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2034.019092] env[62510]: DEBUG oslo_concurrency.lockutils [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.727s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2034.048674] env[62510]: DEBUG oslo_vmware.api [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': task-1769783, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2034.347715] env[62510]: DEBUG oslo_vmware.api [None req-4e7ca7ea-a4b3-44c2-a2d2-c714bda31911 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769782, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.647611} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2034.348088] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e7ca7ea-a4b3-44c2-a2d2-c714bda31911 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2034.348251] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4e7ca7ea-a4b3-44c2-a2d2-c714bda31911 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2034.348407] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4e7ca7ea-a4b3-44c2-a2d2-c714bda31911 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2034.348588] env[62510]: INFO nova.compute.manager [None req-4e7ca7ea-a4b3-44c2-a2d2-c714bda31911 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Took 1.71 seconds to destroy the instance on the hypervisor. [ 2034.348860] env[62510]: DEBUG oslo.service.loopingcall [None req-4e7ca7ea-a4b3-44c2-a2d2-c714bda31911 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2034.349071] env[62510]: DEBUG nova.compute.manager [-] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2034.349168] env[62510]: DEBUG nova.network.neutron [-] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2034.549307] env[62510]: DEBUG oslo_vmware.api [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': task-1769783, 'name': ReconfigVM_Task, 'duration_secs': 0.739721} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2034.549307] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Reconfigured VM instance instance-00000079 to attach disk [datastore1] f9dc3ae0-a004-4baf-a972-e4480774cc3f/645af513-c243-4722-b631-714f21477ae6-rescue.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2034.549777] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04bf5f00-949c-4283-8433-0d220c0c62c6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.575919] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3e2efb89-8582-4748-88b2-6c14401ad01f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.591070] env[62510]: DEBUG oslo_vmware.api [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Waiting for the task: (returnval){ [ 2034.591070] env[62510]: value = "task-1769784" [ 2034.591070] env[62510]: _type = "Task" [ 2034.591070] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2034.599381] env[62510]: DEBUG oslo_vmware.api [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': task-1769784, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2034.815042] env[62510]: DEBUG nova.compute.manager [req-d587e5a7-60ab-459d-bd75-3eb017fc1003 req-a9e33c84-c115-40a3-a8ac-8a130afd71a7 service nova] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Received event network-vif-deleted-5550cd5d-e9b6-4414-a8e4-e7c6875d2399 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2034.815268] env[62510]: INFO nova.compute.manager [req-d587e5a7-60ab-459d-bd75-3eb017fc1003 req-a9e33c84-c115-40a3-a8ac-8a130afd71a7 service nova] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Neutron deleted interface 5550cd5d-e9b6-4414-a8e4-e7c6875d2399; detaching it from the instance and deleting it from the info cache [ 2034.815439] env[62510]: DEBUG nova.network.neutron [req-d587e5a7-60ab-459d-bd75-3eb017fc1003 req-a9e33c84-c115-40a3-a8ac-8a130afd71a7 service nova] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2035.100881] env[62510]: DEBUG oslo_vmware.api [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': task-1769784, 'name': ReconfigVM_Task, 'duration_secs': 0.149615} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2035.101174] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2035.101416] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-66aa22e1-723b-4d4d-8b24-72577afb5656 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.107708] env[62510]: DEBUG oslo_vmware.api [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Waiting for the task: (returnval){ [ 2035.107708] env[62510]: value = "task-1769785" [ 2035.107708] env[62510]: _type = "Task" [ 2035.107708] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2035.115315] env[62510]: DEBUG oslo_vmware.api [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': task-1769785, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2035.286223] env[62510]: DEBUG nova.network.neutron [-] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2035.318590] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b13065e3-8213-4465-9670-4d2cc1e108c2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.328373] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9b27bbb-49b8-4d8b-acce-a63465ecca28 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.360491] env[62510]: DEBUG nova.compute.manager [req-d587e5a7-60ab-459d-bd75-3eb017fc1003 req-a9e33c84-c115-40a3-a8ac-8a130afd71a7 service nova] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Detach interface failed, port_id=5550cd5d-e9b6-4414-a8e4-e7c6875d2399, reason: Instance f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095 could not be found. {{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 2035.559331] env[62510]: INFO nova.compute.manager [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Swapping old allocation on dict_keys(['c3653102-341b-4ed1-8b1f-1abaf8aa3e56']) held by migration 1526651e-ada9-4a8e-a6a6-c130f982d0a2 for instance [ 2035.582958] env[62510]: DEBUG nova.scheduler.client.report [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Overwriting current allocation {'allocations': {'c3653102-341b-4ed1-8b1f-1abaf8aa3e56': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 178}}, 'project_id': 'bae4f0adee8c4c28add1849316448538', 'user_id': '80f05c3e00b84277b4401aa98a253692', 'consumer_generation': 1} on consumer 22002fc1-647e-4e65-a5f0-c3a34575985f {{(pid=62510) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2033}} [ 2035.618149] env[62510]: DEBUG oslo_vmware.api [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': task-1769785, 'name': PowerOnVM_Task, 'duration_secs': 0.48114} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2035.618489] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2035.621573] env[62510]: DEBUG nova.compute.manager [None req-f99de4c9-fd3f-4c65-b62d-a398543f84c6 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2035.622415] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-722610e9-583b-42ff-8521-d7b9fbcd2e6e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.665641] env[62510]: DEBUG oslo_concurrency.lockutils [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "refresh_cache-22002fc1-647e-4e65-a5f0-c3a34575985f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2035.665861] env[62510]: DEBUG oslo_concurrency.lockutils [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquired lock "refresh_cache-22002fc1-647e-4e65-a5f0-c3a34575985f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2035.666099] env[62510]: DEBUG nova.network.neutron [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2035.788577] env[62510]: INFO nova.compute.manager [-] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Took 1.44 seconds to deallocate network for instance. 
[ 2036.295517] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4e7ca7ea-a4b3-44c2-a2d2-c714bda31911 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2036.295769] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4e7ca7ea-a4b3-44c2-a2d2-c714bda31911 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2036.295999] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4e7ca7ea-a4b3-44c2-a2d2-c714bda31911 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2036.318027] env[62510]: INFO nova.scheduler.client.report [None req-4e7ca7ea-a4b3-44c2-a2d2-c714bda31911 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Deleted allocations for instance f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095 [ 2036.513285] env[62510]: DEBUG nova.network.neutron [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Updating instance_info_cache with network_info: [{"id": "cc8e6d9b-23a8-4a82-bce2-858b46a9cf25", "address": "fa:16:3e:28:36:9e", "network": {"id": "4c55d05c-607e-4972-898f-4aacefeddfdb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1391357384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bae4f0adee8c4c28add1849316448538", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dced2f3d-7fd3-4a42-836d-9f02dab4c949", "external-id": "nsx-vlan-transportzone-117", "segmentation_id": 117, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc8e6d9b-23", "ovs_interfaceid": "cc8e6d9b-23a8-4a82-bce2-858b46a9cf25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2036.831523] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4e7ca7ea-a4b3-44c2-a2d2-c714bda31911 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.699s {{(pid=62510) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2036.892422] env[62510]: DEBUG nova.compute.manager [req-7a04d450-0a6f-487c-acb8-64f21574f8c8 req-89c28698-feef-425b-b778-244568728d9b service nova] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Received event network-changed-5dc06f69-e8a7-42e6-beb5-dc159884a1fd {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2036.892643] env[62510]: DEBUG nova.compute.manager [req-7a04d450-0a6f-487c-acb8-64f21574f8c8 req-89c28698-feef-425b-b778-244568728d9b service nova] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Refreshing instance network info cache due to event network-changed-5dc06f69-e8a7-42e6-beb5-dc159884a1fd. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 2036.892806] env[62510]: DEBUG oslo_concurrency.lockutils [req-7a04d450-0a6f-487c-acb8-64f21574f8c8 req-89c28698-feef-425b-b778-244568728d9b service nova] Acquiring lock "refresh_cache-f9dc3ae0-a004-4baf-a972-e4480774cc3f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2036.892951] env[62510]: DEBUG oslo_concurrency.lockutils [req-7a04d450-0a6f-487c-acb8-64f21574f8c8 req-89c28698-feef-425b-b778-244568728d9b service nova] Acquired lock "refresh_cache-f9dc3ae0-a004-4baf-a972-e4480774cc3f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2036.893131] env[62510]: DEBUG nova.network.neutron [req-7a04d450-0a6f-487c-acb8-64f21574f8c8 req-89c28698-feef-425b-b778-244568728d9b service nova] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Refreshing network info cache for port 5dc06f69-e8a7-42e6-beb5-dc159884a1fd {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2037.016490] env[62510]: DEBUG oslo_concurrency.lockutils [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Releasing lock "refresh_cache-22002fc1-647e-4e65-a5f0-c3a34575985f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2037.017500] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50429353-3b6c-47aa-85da-e059c197ea5d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.025480] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a2a745c-d8ff-4c5a-99d3-8cc18680cfa0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.744049] env[62510]: DEBUG nova.network.neutron [req-7a04d450-0a6f-487c-acb8-64f21574f8c8 req-89c28698-feef-425b-b778-244568728d9b service nova] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Updated VIF entry in instance network info cache for port 5dc06f69-e8a7-42e6-beb5-dc159884a1fd. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2037.744433] env[62510]: DEBUG nova.network.neutron [req-7a04d450-0a6f-487c-acb8-64f21574f8c8 req-89c28698-feef-425b-b778-244568728d9b service nova] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Updating instance_info_cache with network_info: [{"id": "5dc06f69-e8a7-42e6-beb5-dc159884a1fd", "address": "fa:16:3e:60:30:a8", "network": {"id": "ebbeee87-e946-483c-a3dc-c38e3ff8a9ba", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1070827522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "bc0190059df4469d8487f3e1fbfd05dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "245efab9-c420-438e-a0b8-906357ef62c1", "external-id": "nsx-vlan-transportzone-959", "segmentation_id": 959, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5dc06f69-e8", "ovs_interfaceid": "5dc06f69-e8a7-42e6-beb5-dc159884a1fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2038.113329] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2038.113657] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3a77102f-bd0f-4123-b203-0f33be675c06 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.122717] env[62510]: DEBUG oslo_vmware.api [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 2038.122717] env[62510]: value = "task-1769786" [ 2038.122717] env[62510]: _type = "Task" [ 2038.122717] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2038.137811] env[62510]: DEBUG oslo_vmware.api [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769786, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2038.246757] env[62510]: DEBUG oslo_concurrency.lockutils [req-7a04d450-0a6f-487c-acb8-64f21574f8c8 req-89c28698-feef-425b-b778-244568728d9b service nova] Releasing lock "refresh_cache-f9dc3ae0-a004-4baf-a972-e4480774cc3f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2038.370230] env[62510]: DEBUG oslo_concurrency.lockutils [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "62417973-075e-4128-8eb5-4c62946856e7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2038.370475] env[62510]: DEBUG oslo_concurrency.lockutils [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "62417973-075e-4128-8eb5-4c62946856e7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2038.636890] env[62510]: DEBUG oslo_vmware.api [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769786, 'name': PowerOffVM_Task, 'duration_secs': 0.488265} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2038.637804] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2038.638516] env[62510]: DEBUG nova.virt.hardware [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2038.638817] env[62510]: DEBUG nova.virt.hardware [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2038.638956] env[62510]: DEBUG nova.virt.hardware [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 
tempest-ServerActionsTestOtherB-1185268283-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2038.639145] env[62510]: DEBUG nova.virt.hardware [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2038.639346] env[62510]: DEBUG nova.virt.hardware [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2038.639533] env[62510]: DEBUG nova.virt.hardware [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2038.639819] env[62510]: DEBUG nova.virt.hardware [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2038.640050] env[62510]: DEBUG nova.virt.hardware [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2038.640260] env[62510]: DEBUG nova.virt.hardware [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2038.640472] env[62510]: DEBUG nova.virt.hardware [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2038.640670] env[62510]: DEBUG nova.virt.hardware [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2038.647418] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f4057ca7-b0cc-41f1-80e0-d724db23ebcb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.664635] env[62510]: DEBUG oslo_vmware.api [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 2038.664635] env[62510]: value = "task-1769787" [ 2038.664635] env[62510]: _type = "Task" [ 2038.664635] env[62510]: } to 
complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2038.674974] env[62510]: DEBUG oslo_vmware.api [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769787, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2038.877026] env[62510]: DEBUG nova.compute.manager [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2038.929985] env[62510]: DEBUG nova.compute.manager [req-9bb975f7-648d-42f8-ab16-739a5e8a50a4 req-316da4d7-f0f5-4dfa-8b74-d97871d75c32 service nova] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Received event network-changed-5dc06f69-e8a7-42e6-beb5-dc159884a1fd {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2038.930304] env[62510]: DEBUG nova.compute.manager [req-9bb975f7-648d-42f8-ab16-739a5e8a50a4 req-316da4d7-f0f5-4dfa-8b74-d97871d75c32 service nova] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Refreshing instance network info cache due to event network-changed-5dc06f69-e8a7-42e6-beb5-dc159884a1fd. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 2038.930552] env[62510]: DEBUG oslo_concurrency.lockutils [req-9bb975f7-648d-42f8-ab16-739a5e8a50a4 req-316da4d7-f0f5-4dfa-8b74-d97871d75c32 service nova] Acquiring lock "refresh_cache-f9dc3ae0-a004-4baf-a972-e4480774cc3f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2038.930707] env[62510]: DEBUG oslo_concurrency.lockutils [req-9bb975f7-648d-42f8-ab16-739a5e8a50a4 req-316da4d7-f0f5-4dfa-8b74-d97871d75c32 service nova] Acquired lock "refresh_cache-f9dc3ae0-a004-4baf-a972-e4480774cc3f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2038.930878] env[62510]: DEBUG nova.network.neutron [req-9bb975f7-648d-42f8-ab16-739a5e8a50a4 req-316da4d7-f0f5-4dfa-8b74-d97871d75c32 service nova] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Refreshing network info cache for port 5dc06f69-e8a7-42e6-beb5-dc159884a1fd {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2039.178751] env[62510]: DEBUG oslo_vmware.api [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769787, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2039.403168] env[62510]: DEBUG oslo_concurrency.lockutils [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2039.403424] env[62510]: DEBUG oslo_concurrency.lockutils [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2039.405278] env[62510]: INFO nova.compute.claims [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2039.676726] env[62510]: DEBUG oslo_vmware.api [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769787, 'name': ReconfigVM_Task, 'duration_secs': 0.513386} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2039.677594] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5633ad36-4123-424e-9197-a7dc3c56ba47 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.702106] env[62510]: DEBUG nova.virt.hardware [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2039.702347] env[62510]: DEBUG nova.virt.hardware [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2039.702505] env[62510]: DEBUG nova.virt.hardware [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2039.702738] env[62510]: DEBUG nova.virt.hardware [None 
req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2039.702834] env[62510]: DEBUG nova.virt.hardware [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2039.702995] env[62510]: DEBUG nova.virt.hardware [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2039.703180] env[62510]: DEBUG nova.virt.hardware [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2039.703337] env[62510]: DEBUG nova.virt.hardware [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2039.703499] env[62510]: DEBUG nova.virt.hardware [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2039.703660] env[62510]: DEBUG nova.virt.hardware [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2039.703830] env[62510]: DEBUG nova.virt.hardware [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2039.704709] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42935165-6c57-46e9-bafd-87423511b543 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.710379] env[62510]: DEBUG oslo_vmware.api [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 2039.710379] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]523880cb-9ab4-4276-77e4-92c172c73d47" [ 2039.710379] env[62510]: _type = "Task" [ 2039.710379] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2039.719531] env[62510]: DEBUG oslo_vmware.api [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]523880cb-9ab4-4276-77e4-92c172c73d47, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2039.738185] env[62510]: DEBUG nova.network.neutron [req-9bb975f7-648d-42f8-ab16-739a5e8a50a4 req-316da4d7-f0f5-4dfa-8b74-d97871d75c32 service nova] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Updated VIF entry in instance network info cache for port 5dc06f69-e8a7-42e6-beb5-dc159884a1fd. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2039.738695] env[62510]: DEBUG nova.network.neutron [req-9bb975f7-648d-42f8-ab16-739a5e8a50a4 req-316da4d7-f0f5-4dfa-8b74-d97871d75c32 service nova] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Updating instance_info_cache with network_info: [{"id": "5dc06f69-e8a7-42e6-beb5-dc159884a1fd", "address": "fa:16:3e:60:30:a8", "network": {"id": "ebbeee87-e946-483c-a3dc-c38e3ff8a9ba", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1070827522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "bc0190059df4469d8487f3e1fbfd05dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "245efab9-c420-438e-a0b8-906357ef62c1", "external-id": "nsx-vlan-transportzone-959", "segmentation_id": 959, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5dc06f69-e8", "ovs_interfaceid": "5dc06f69-e8a7-42e6-beb5-dc159884a1fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2040.221089] env[62510]: DEBUG oslo_vmware.api [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]523880cb-9ab4-4276-77e4-92c172c73d47, 'name': SearchDatastore_Task, 'duration_secs': 0.010538} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2040.226474] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Reconfiguring VM instance instance-00000072 to detach disk 2000 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2040.226742] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-877b102d-0cdb-4052-a3df-fe19917ea7ec {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.240530] env[62510]: DEBUG oslo_concurrency.lockutils [req-9bb975f7-648d-42f8-ab16-739a5e8a50a4 req-316da4d7-f0f5-4dfa-8b74-d97871d75c32 service nova] Releasing lock "refresh_cache-f9dc3ae0-a004-4baf-a972-e4480774cc3f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2040.240775] env[62510]: DEBUG nova.compute.manager [req-9bb975f7-648d-42f8-ab16-739a5e8a50a4 req-316da4d7-f0f5-4dfa-8b74-d97871d75c32 service nova] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Received event network-changed-5dc06f69-e8a7-42e6-beb5-dc159884a1fd {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2040.240942] env[62510]: DEBUG nova.compute.manager [req-9bb975f7-648d-42f8-ab16-739a5e8a50a4 req-316da4d7-f0f5-4dfa-8b74-d97871d75c32 service nova] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Refreshing instance network info cache due to event network-changed-5dc06f69-e8a7-42e6-beb5-dc159884a1fd. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 2040.241170] env[62510]: DEBUG oslo_concurrency.lockutils [req-9bb975f7-648d-42f8-ab16-739a5e8a50a4 req-316da4d7-f0f5-4dfa-8b74-d97871d75c32 service nova] Acquiring lock "refresh_cache-f9dc3ae0-a004-4baf-a972-e4480774cc3f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2040.241314] env[62510]: DEBUG oslo_concurrency.lockutils [req-9bb975f7-648d-42f8-ab16-739a5e8a50a4 req-316da4d7-f0f5-4dfa-8b74-d97871d75c32 service nova] Acquired lock "refresh_cache-f9dc3ae0-a004-4baf-a972-e4480774cc3f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2040.241475] env[62510]: DEBUG nova.network.neutron [req-9bb975f7-648d-42f8-ab16-739a5e8a50a4 req-316da4d7-f0f5-4dfa-8b74-d97871d75c32 service nova] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Refreshing network info cache for port 5dc06f69-e8a7-42e6-beb5-dc159884a1fd {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2040.245412] env[62510]: DEBUG oslo_vmware.api [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 2040.245412] env[62510]: value = "task-1769788" [ 2040.245412] env[62510]: _type = "Task" [ 2040.245412] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2040.253859] env[62510]: DEBUG oslo_vmware.api [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769788, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.515100] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04d6b310-4faf-4e61-a8d2-33995303aba0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.522688] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1eed905-4e51-4df4-8f44-c93aba776852 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.553057] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71c71772-ea32-45c0-8b83-3bbcf093c1cd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.560032] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99e5222d-405a-4f66-92d1-810703faea0f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.572857] env[62510]: DEBUG nova.compute.provider_tree [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2040.645685] env[62510]: DEBUG oslo_concurrency.lockutils [None req-121d0b34-3edb-4884-8b54-0d10fd5698e1 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Acquiring lock "f9dc3ae0-a004-4baf-a972-e4480774cc3f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2040.645857] env[62510]: DEBUG oslo_concurrency.lockutils [None req-121d0b34-3edb-4884-8b54-0d10fd5698e1 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Lock "f9dc3ae0-a004-4baf-a972-e4480774cc3f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2040.645995] env[62510]: DEBUG oslo_concurrency.lockutils [None req-121d0b34-3edb-4884-8b54-0d10fd5698e1 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Acquiring lock "f9dc3ae0-a004-4baf-a972-e4480774cc3f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2040.646148] env[62510]: DEBUG oslo_concurrency.lockutils [None req-121d0b34-3edb-4884-8b54-0d10fd5698e1 tempest-ServerRescueTestJSONUnderV235-483296453 
tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Lock "f9dc3ae0-a004-4baf-a972-e4480774cc3f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2040.646304] env[62510]: DEBUG oslo_concurrency.lockutils [None req-121d0b34-3edb-4884-8b54-0d10fd5698e1 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Lock "f9dc3ae0-a004-4baf-a972-e4480774cc3f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2040.648364] env[62510]: INFO nova.compute.manager [None req-121d0b34-3edb-4884-8b54-0d10fd5698e1 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Terminating instance [ 2040.754548] env[62510]: DEBUG oslo_vmware.api [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769788, 'name': ReconfigVM_Task, 'duration_secs': 0.198305} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2040.754815] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Reconfigured VM instance instance-00000072 to detach disk 2000 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2040.755602] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31f80145-503b-45e7-8f1c-2e8f41bab2fc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.779976] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] 22002fc1-647e-4e65-a5f0-c3a34575985f/22002fc1-647e-4e65-a5f0-c3a34575985f.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2040.782200] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-638640e2-a5aa-4f41-8dd0-1851b38a53ac {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.800745] env[62510]: DEBUG oslo_vmware.api [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 2040.800745] env[62510]: value = "task-1769789" [ 2040.800745] env[62510]: _type = "Task" [ 2040.800745] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2040.808257] env[62510]: DEBUG oslo_vmware.api [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769789, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.952347] env[62510]: DEBUG nova.network.neutron [req-9bb975f7-648d-42f8-ab16-739a5e8a50a4 req-316da4d7-f0f5-4dfa-8b74-d97871d75c32 service nova] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Updated VIF entry in instance network info cache for port 5dc06f69-e8a7-42e6-beb5-dc159884a1fd. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2040.952715] env[62510]: DEBUG nova.network.neutron [req-9bb975f7-648d-42f8-ab16-739a5e8a50a4 req-316da4d7-f0f5-4dfa-8b74-d97871d75c32 service nova] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Updating instance_info_cache with network_info: [{"id": "5dc06f69-e8a7-42e6-beb5-dc159884a1fd", "address": "fa:16:3e:60:30:a8", "network": {"id": "ebbeee87-e946-483c-a3dc-c38e3ff8a9ba", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1070827522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "bc0190059df4469d8487f3e1fbfd05dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "245efab9-c420-438e-a0b8-906357ef62c1", "external-id": "nsx-vlan-transportzone-959", "segmentation_id": 959, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5dc06f69-e8", "ovs_interfaceid": "5dc06f69-e8a7-42e6-beb5-dc159884a1fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2040.961456] env[62510]: DEBUG nova.compute.manager [req-67106dc4-cce2-4deb-829f-260d01d605cf req-963ee0f1-fa7f-42a8-b663-fc2dd061be2a service nova] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Received event network-changed-5dc06f69-e8a7-42e6-beb5-dc159884a1fd {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2040.961653] env[62510]: DEBUG nova.compute.manager [req-67106dc4-cce2-4deb-829f-260d01d605cf req-963ee0f1-fa7f-42a8-b663-fc2dd061be2a service nova] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Refreshing instance network info cache due to event network-changed-5dc06f69-e8a7-42e6-beb5-dc159884a1fd. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 2040.961864] env[62510]: DEBUG oslo_concurrency.lockutils [req-67106dc4-cce2-4deb-829f-260d01d605cf req-963ee0f1-fa7f-42a8-b663-fc2dd061be2a service nova] Acquiring lock "refresh_cache-f9dc3ae0-a004-4baf-a972-e4480774cc3f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2041.076158] env[62510]: DEBUG nova.scheduler.client.report [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2041.152524] env[62510]: DEBUG nova.compute.manager [None req-121d0b34-3edb-4884-8b54-0d10fd5698e1 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2041.152785] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-121d0b34-3edb-4884-8b54-0d10fd5698e1 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2041.153801] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-594c8ded-425b-466f-959e-169ba0c925bb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.161835] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-121d0b34-3edb-4884-8b54-0d10fd5698e1 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2041.162089] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-df9ed581-7337-4f70-885e-37d40f491837 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.168126] env[62510]: DEBUG oslo_vmware.api [None req-121d0b34-3edb-4884-8b54-0d10fd5698e1 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Waiting for the task: (returnval){ [ 2041.168126] env[62510]: value = "task-1769790" [ 2041.168126] env[62510]: _type = "Task" [ 2041.168126] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2041.176857] env[62510]: DEBUG oslo_vmware.api [None req-121d0b34-3edb-4884-8b54-0d10fd5698e1 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': task-1769790, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2041.310359] env[62510]: DEBUG oslo_vmware.api [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769789, 'name': ReconfigVM_Task, 'duration_secs': 0.347765} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2041.310751] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Reconfigured VM instance instance-00000072 to attach disk [datastore1] 22002fc1-647e-4e65-a5f0-c3a34575985f/22002fc1-647e-4e65-a5f0-c3a34575985f.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2041.311704] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fde92c47-c873-49f6-9b7d-39247d3fb934 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.332524] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-172cc00d-3a09-47ff-9642-0f23d903b58b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.354169] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea672e9b-3602-4531-b6e4-b1ef09cdc887 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.375331] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b3e5d8e-dfdc-45e5-a863-a4ba2cbd8d8e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.382690] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2041.382951] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-42a0094e-d0ae-44a7-94a3-ccb8ad8a6e59 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.389539] env[62510]: DEBUG oslo_vmware.api [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 2041.389539] env[62510]: value = "task-1769791" [ 2041.389539] env[62510]: _type = "Task" [ 2041.389539] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2041.397362] env[62510]: DEBUG oslo_vmware.api [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769791, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2041.455335] env[62510]: DEBUG oslo_concurrency.lockutils [req-9bb975f7-648d-42f8-ab16-739a5e8a50a4 req-316da4d7-f0f5-4dfa-8b74-d97871d75c32 service nova] Releasing lock "refresh_cache-f9dc3ae0-a004-4baf-a972-e4480774cc3f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2041.455867] env[62510]: DEBUG oslo_concurrency.lockutils [req-67106dc4-cce2-4deb-829f-260d01d605cf req-963ee0f1-fa7f-42a8-b663-fc2dd061be2a service nova] Acquired lock "refresh_cache-f9dc3ae0-a004-4baf-a972-e4480774cc3f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2041.456111] env[62510]: DEBUG nova.network.neutron [req-67106dc4-cce2-4deb-829f-260d01d605cf req-963ee0f1-fa7f-42a8-b663-fc2dd061be2a service nova] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Refreshing network info cache for port 5dc06f69-e8a7-42e6-beb5-dc159884a1fd {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2041.580751] env[62510]: DEBUG oslo_concurrency.lockutils [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.177s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2041.581312] env[62510]: DEBUG nova.compute.manager [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2041.678130] env[62510]: DEBUG oslo_vmware.api [None req-121d0b34-3edb-4884-8b54-0d10fd5698e1 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': task-1769790, 'name': PowerOffVM_Task, 'duration_secs': 0.219347} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2041.678387] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-121d0b34-3edb-4884-8b54-0d10fd5698e1 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2041.678556] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-121d0b34-3edb-4884-8b54-0d10fd5698e1 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2041.678799] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5d808fcf-b4c4-4d27-8d98-4fd9bda85a28 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.765424] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-121d0b34-3edb-4884-8b54-0d10fd5698e1 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2041.765765] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-121d0b34-3edb-4884-8b54-0d10fd5698e1 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2041.765765] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-121d0b34-3edb-4884-8b54-0d10fd5698e1 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Deleting the datastore file [datastore1] f9dc3ae0-a004-4baf-a972-e4480774cc3f {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2041.766015] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-459d08d8-cc74-4a21-a2ad-0a55d6645ae6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.772605] env[62510]: DEBUG oslo_vmware.api [None req-121d0b34-3edb-4884-8b54-0d10fd5698e1 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Waiting for the task: (returnval){ [ 2041.772605] env[62510]: value = "task-1769793" [ 2041.772605] env[62510]: _type = "Task" [ 2041.772605] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2041.780718] env[62510]: DEBUG oslo_vmware.api [None req-121d0b34-3edb-4884-8b54-0d10fd5698e1 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': task-1769793, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2041.899864] env[62510]: DEBUG oslo_vmware.api [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769791, 'name': PowerOnVM_Task, 'duration_secs': 0.409375} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2041.900121] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2042.086379] env[62510]: DEBUG nova.compute.utils [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2042.087726] env[62510]: DEBUG nova.compute.manager [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2042.087897] env[62510]: DEBUG nova.network.neutron [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2042.123882] env[62510]: DEBUG nova.policy [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e483d7dc32804985bc9af5128670131b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5b13a257970e4a9a9f9cfecaaf37d9da', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 2042.133784] env[62510]: DEBUG nova.network.neutron [req-67106dc4-cce2-4deb-829f-260d01d605cf req-963ee0f1-fa7f-42a8-b663-fc2dd061be2a service nova] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Updated VIF entry in instance network info cache for port 5dc06f69-e8a7-42e6-beb5-dc159884a1fd. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2042.134157] env[62510]: DEBUG nova.network.neutron [req-67106dc4-cce2-4deb-829f-260d01d605cf req-963ee0f1-fa7f-42a8-b663-fc2dd061be2a service nova] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Updating instance_info_cache with network_info: [{"id": "5dc06f69-e8a7-42e6-beb5-dc159884a1fd", "address": "fa:16:3e:60:30:a8", "network": {"id": "ebbeee87-e946-483c-a3dc-c38e3ff8a9ba", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1070827522-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "bc0190059df4469d8487f3e1fbfd05dc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "245efab9-c420-438e-a0b8-906357ef62c1", "external-id": "nsx-vlan-transportzone-959", "segmentation_id": 959, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5dc06f69-e8", "ovs_interfaceid": "5dc06f69-e8a7-42e6-beb5-dc159884a1fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2042.282795] env[62510]: DEBUG oslo_vmware.api [None req-121d0b34-3edb-4884-8b54-0d10fd5698e1 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Task: {'id': task-1769793, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.187532} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2042.283058] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-121d0b34-3edb-4884-8b54-0d10fd5698e1 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2042.283247] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-121d0b34-3edb-4884-8b54-0d10fd5698e1 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2042.283422] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-121d0b34-3edb-4884-8b54-0d10fd5698e1 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2042.283600] env[62510]: INFO nova.compute.manager [None req-121d0b34-3edb-4884-8b54-0d10fd5698e1 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Took 1.13 seconds to destroy the instance on the hypervisor. 
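The records above trace the vmwareapi destroy path for instance f9dc3ae0-a004-4baf-a972-e4480774cc3f: the VM is powered off, unregistered, and its backing files are removed from datastore1, with each long-running step driven through oslo.vmware's task polling (the repeated "progress is N%" and "completed successfully" lines). A minimal sketch of that call-and-poll idiom follows; the session parameters, vm_ref, ds_path and dc_ref are placeholders for illustration, not values from this log, and the sketch is not the nova driver code itself.

# Minimal sketch of the oslo.vmware call/poll idiom behind the destroy sequence
# traced above (PowerOffVM_Task -> UnregisterVM -> DeleteDatastoreFile_Task).
# Host, credentials, vm_ref, ds_path and dc_ref are hypothetical placeholders.
from oslo_vmware import api


def destroy_vm(host, user, password, vm_ref, ds_path, dc_ref):
    session = api.VMwareAPISession(host, user, password,
                                   api_retry_count=10,
                                   task_poll_interval=0.5)

    # *_Task methods return a task moref; wait_for_task() polls it until it
    # succeeds, which is what the "progress is N%" records correspond to.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

    # UnregisterVM is synchronous, so there is no task to poll for this step.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # Deleting the backing files goes through the FileManager and is again
    # a task that gets polled to completion.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager, name=ds_path, datacenter=dc_ref)
    session.wait_for_task(task)

After the hypervisor-side teardown finishes, the log continues with network deallocation for the same instance, which runs through oslo.service's looping-call retry helper rather than a vSphere task.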
[ 2042.283839] env[62510]: DEBUG oslo.service.loopingcall [None req-121d0b34-3edb-4884-8b54-0d10fd5698e1 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2042.284044] env[62510]: DEBUG nova.compute.manager [-] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2042.284144] env[62510]: DEBUG nova.network.neutron [-] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2042.436425] env[62510]: DEBUG nova.network.neutron [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Successfully created port: 7afce004-8ac8-4715-a27d-7e5162c006ba {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2042.590471] env[62510]: DEBUG nova.compute.manager [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2042.637246] env[62510]: DEBUG oslo_concurrency.lockutils [req-67106dc4-cce2-4deb-829f-260d01d605cf req-963ee0f1-fa7f-42a8-b663-fc2dd061be2a service nova] Releasing lock "refresh_cache-f9dc3ae0-a004-4baf-a972-e4480774cc3f" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2042.950545] env[62510]: INFO nova.compute.manager [None req-47ed46f9-5dce-42b5-89b5-38cd44f58bdc tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Updating instance to original state: 'active' [ 2042.991164] env[62510]: DEBUG nova.compute.manager [req-c0c60a0e-a942-4acf-b206-e3b7f1c3b74b req-a134e52f-8bb7-4ddc-8947-f5cc7f0f44a7 service nova] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Received event network-vif-deleted-5dc06f69-e8a7-42e6-beb5-dc159884a1fd {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2042.991508] env[62510]: INFO nova.compute.manager [req-c0c60a0e-a942-4acf-b206-e3b7f1c3b74b req-a134e52f-8bb7-4ddc-8947-f5cc7f0f44a7 service nova] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Neutron deleted interface 5dc06f69-e8a7-42e6-beb5-dc159884a1fd; detaching it from the instance and deleting it from the info cache [ 2042.991795] env[62510]: DEBUG nova.network.neutron [req-c0c60a0e-a942-4acf-b206-e3b7f1c3b74b req-a134e52f-8bb7-4ddc-8947-f5cc7f0f44a7 service nova] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2043.066046] env[62510]: DEBUG nova.network.neutron [-] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 2043.495146] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ada95617-a830-41d7-8a3b-4c4876a5d668 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.506794] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c64be62f-aed9-49f3-adad-92afbf768925 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.540604] env[62510]: DEBUG nova.compute.manager [req-c0c60a0e-a942-4acf-b206-e3b7f1c3b74b req-a134e52f-8bb7-4ddc-8947-f5cc7f0f44a7 service nova] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Detach interface failed, port_id=5dc06f69-e8a7-42e6-beb5-dc159884a1fd, reason: Instance f9dc3ae0-a004-4baf-a972-e4480774cc3f could not be found. {{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 2043.568903] env[62510]: INFO nova.compute.manager [-] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Took 1.28 seconds to deallocate network for instance. [ 2043.599632] env[62510]: DEBUG nova.compute.manager [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2043.626330] env[62510]: DEBUG nova.virt.hardware [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2043.626535] env[62510]: DEBUG nova.virt.hardware [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2043.626715] env[62510]: DEBUG nova.virt.hardware [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2043.626917] env[62510]: DEBUG nova.virt.hardware [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 
2043.627128] env[62510]: DEBUG nova.virt.hardware [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2043.627301] env[62510]: DEBUG nova.virt.hardware [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2043.627527] env[62510]: DEBUG nova.virt.hardware [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2043.627707] env[62510]: DEBUG nova.virt.hardware [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2043.627888] env[62510]: DEBUG nova.virt.hardware [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2043.628068] env[62510]: DEBUG nova.virt.hardware [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2043.628264] env[62510]: DEBUG nova.virt.hardware [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2043.629219] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32bf148f-775f-444a-9d84-cd5e1ef02b4e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.638151] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1755cda3-8d25-46f4-a7fb-c14129e65166 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.839381] env[62510]: DEBUG nova.compute.manager [req-5b34f152-ef4a-4c5a-a61f-9dd4d2405e82 req-13e42728-3cd5-41d5-ab6b-4570dc2cf3b4 service nova] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Received event network-vif-plugged-7afce004-8ac8-4715-a27d-7e5162c006ba {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2043.839381] env[62510]: DEBUG oslo_concurrency.lockutils [req-5b34f152-ef4a-4c5a-a61f-9dd4d2405e82 req-13e42728-3cd5-41d5-ab6b-4570dc2cf3b4 service nova] Acquiring lock "62417973-075e-4128-8eb5-4c62946856e7-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2043.839381] env[62510]: DEBUG oslo_concurrency.lockutils [req-5b34f152-ef4a-4c5a-a61f-9dd4d2405e82 req-13e42728-3cd5-41d5-ab6b-4570dc2cf3b4 service nova] Lock "62417973-075e-4128-8eb5-4c62946856e7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2043.839381] env[62510]: DEBUG oslo_concurrency.lockutils [req-5b34f152-ef4a-4c5a-a61f-9dd4d2405e82 req-13e42728-3cd5-41d5-ab6b-4570dc2cf3b4 service nova] Lock "62417973-075e-4128-8eb5-4c62946856e7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2043.839600] env[62510]: DEBUG nova.compute.manager [req-5b34f152-ef4a-4c5a-a61f-9dd4d2405e82 req-13e42728-3cd5-41d5-ab6b-4570dc2cf3b4 service nova] [instance: 62417973-075e-4128-8eb5-4c62946856e7] No waiting events found dispatching network-vif-plugged-7afce004-8ac8-4715-a27d-7e5162c006ba {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2043.839924] env[62510]: WARNING nova.compute.manager [req-5b34f152-ef4a-4c5a-a61f-9dd4d2405e82 req-13e42728-3cd5-41d5-ab6b-4570dc2cf3b4 service nova] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Received unexpected event network-vif-plugged-7afce004-8ac8-4715-a27d-7e5162c006ba for instance with vm_state building and task_state spawning. [ 2043.927589] env[62510]: DEBUG nova.network.neutron [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Successfully updated port: 7afce004-8ac8-4715-a27d-7e5162c006ba {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2044.075735] env[62510]: DEBUG oslo_concurrency.lockutils [None req-121d0b34-3edb-4884-8b54-0d10fd5698e1 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2044.076023] env[62510]: DEBUG oslo_concurrency.lockutils [None req-121d0b34-3edb-4884-8b54-0d10fd5698e1 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2044.076250] env[62510]: DEBUG nova.objects.instance [None req-121d0b34-3edb-4884-8b54-0d10fd5698e1 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Lazy-loading 'resources' on Instance uuid f9dc3ae0-a004-4baf-a972-e4480774cc3f {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2044.430889] env[62510]: DEBUG oslo_concurrency.lockutils [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock 
"refresh_cache-62417973-075e-4128-8eb5-4c62946856e7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2044.431053] env[62510]: DEBUG oslo_concurrency.lockutils [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquired lock "refresh_cache-62417973-075e-4128-8eb5-4c62946856e7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2044.431223] env[62510]: DEBUG nova.network.neutron [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2044.475694] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "22002fc1-647e-4e65-a5f0-c3a34575985f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2044.475924] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "22002fc1-647e-4e65-a5f0-c3a34575985f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2044.476161] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "22002fc1-647e-4e65-a5f0-c3a34575985f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2044.476350] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "22002fc1-647e-4e65-a5f0-c3a34575985f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2044.476516] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "22002fc1-647e-4e65-a5f0-c3a34575985f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2044.478748] env[62510]: INFO nova.compute.manager [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Terminating instance [ 2044.682705] env[62510]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7b16692-7bf1-49e4-a122-1e6d11048e22 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.690540] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0ff56e3-7938-4181-a247-7bbf6f613b47 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.721873] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d166872b-dee1-4338-b74f-faacd8543b47 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.729050] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-064dbd12-08e0-468e-a32a-5df45744c2b7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.742030] env[62510]: DEBUG nova.compute.provider_tree [None req-121d0b34-3edb-4884-8b54-0d10fd5698e1 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2044.971809] env[62510]: DEBUG nova.network.neutron [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2044.982818] env[62510]: DEBUG nova.compute.manager [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2044.983078] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2044.983358] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dde1fcea-e9f3-42a0-bdc8-2444e8509142 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.991011] env[62510]: DEBUG oslo_vmware.api [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 2044.991011] env[62510]: value = "task-1769794" [ 2044.991011] env[62510]: _type = "Task" [ 2044.991011] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2045.001113] env[62510]: DEBUG oslo_vmware.api [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769794, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2045.138481] env[62510]: DEBUG nova.network.neutron [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Updating instance_info_cache with network_info: [{"id": "7afce004-8ac8-4715-a27d-7e5162c006ba", "address": "fa:16:3e:b5:da:54", "network": {"id": "e49618de-aacc-4b42-8a2e-7e2dc945a3b1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-883053645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b13a257970e4a9a9f9cfecaaf37d9da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7afce004-8a", "ovs_interfaceid": "7afce004-8ac8-4715-a27d-7e5162c006ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2045.245373] env[62510]: DEBUG nova.scheduler.client.report [None req-121d0b34-3edb-4884-8b54-0d10fd5698e1 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2045.503429] env[62510]: DEBUG oslo_vmware.api [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769794, 'name': PowerOffVM_Task, 'duration_secs': 0.228777} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2045.503645] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2045.503838] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Volume detach. 
Driver type: vmdk {{(pid=62510) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2045.504040] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367507', 'volume_id': 'adb312a1-8d6e-418b-86ab-664579515ac4', 'name': 'volume-adb312a1-8d6e-418b-86ab-664579515ac4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': '22002fc1-647e-4e65-a5f0-c3a34575985f', 'attached_at': '2024-12-11T19:45:28.000000', 'detached_at': '', 'volume_id': 'adb312a1-8d6e-418b-86ab-664579515ac4', 'serial': 'adb312a1-8d6e-418b-86ab-664579515ac4'} {{(pid=62510) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2045.504815] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ad3705d-b6e2-4a67-8454-cbaeccfb646a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.525022] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23cbe9f7-c3de-4031-b989-0ce2faf879c5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.531695] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c092c8d-2966-4b27-b3ec-857b665659ec {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.552173] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f498c1bb-cd4c-47c5-9939-aa78e8085794 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.566303] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] The volume has not been displaced from its original location: [datastore1] volume-adb312a1-8d6e-418b-86ab-664579515ac4/volume-adb312a1-8d6e-418b-86ab-664579515ac4.vmdk. No consolidation needed. 
{{(pid=62510) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2045.571488] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Reconfiguring VM instance instance-00000072 to detach disk 2001 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2045.571734] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3e646771-d2a8-4f7b-b789-85b7c9197bfa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.589663] env[62510]: DEBUG oslo_vmware.api [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 2045.589663] env[62510]: value = "task-1769795" [ 2045.589663] env[62510]: _type = "Task" [ 2045.589663] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2045.598544] env[62510]: DEBUG oslo_vmware.api [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769795, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2045.641321] env[62510]: DEBUG oslo_concurrency.lockutils [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Releasing lock "refresh_cache-62417973-075e-4128-8eb5-4c62946856e7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2045.641741] env[62510]: DEBUG nova.compute.manager [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Instance network_info: |[{"id": "7afce004-8ac8-4715-a27d-7e5162c006ba", "address": "fa:16:3e:b5:da:54", "network": {"id": "e49618de-aacc-4b42-8a2e-7e2dc945a3b1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-883053645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b13a257970e4a9a9f9cfecaaf37d9da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7afce004-8a", "ovs_interfaceid": "7afce004-8ac8-4715-a27d-7e5162c006ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2045.642465] env[62510]: DEBUG 
nova.virt.vmwareapi.vmops [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b5:da:54', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73b1ea51-8078-4169-921e-d5a224120ab4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7afce004-8ac8-4715-a27d-7e5162c006ba', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2045.649948] env[62510]: DEBUG oslo.service.loopingcall [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2045.650189] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2045.650411] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c807f3f2-867f-4768-9a18-5b6ffc3c25da {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.670225] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2045.670225] env[62510]: value = "task-1769796" [ 2045.670225] env[62510]: _type = "Task" [ 2045.670225] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2045.677541] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769796, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2045.749854] env[62510]: DEBUG oslo_concurrency.lockutils [None req-121d0b34-3edb-4884-8b54-0d10fd5698e1 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.674s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2045.769470] env[62510]: INFO nova.scheduler.client.report [None req-121d0b34-3edb-4884-8b54-0d10fd5698e1 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Deleted allocations for instance f9dc3ae0-a004-4baf-a972-e4480774cc3f [ 2045.864545] env[62510]: DEBUG nova.compute.manager [req-3465de72-b60d-4173-88b4-46e05b670b2c req-b603c99e-865b-44ce-b0c8-c713d9153995 service nova] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Received event network-changed-7afce004-8ac8-4715-a27d-7e5162c006ba {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2045.864744] env[62510]: DEBUG nova.compute.manager [req-3465de72-b60d-4173-88b4-46e05b670b2c req-b603c99e-865b-44ce-b0c8-c713d9153995 service nova] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Refreshing instance network info cache due to event network-changed-7afce004-8ac8-4715-a27d-7e5162c006ba. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 2045.864959] env[62510]: DEBUG oslo_concurrency.lockutils [req-3465de72-b60d-4173-88b4-46e05b670b2c req-b603c99e-865b-44ce-b0c8-c713d9153995 service nova] Acquiring lock "refresh_cache-62417973-075e-4128-8eb5-4c62946856e7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2045.865098] env[62510]: DEBUG oslo_concurrency.lockutils [req-3465de72-b60d-4173-88b4-46e05b670b2c req-b603c99e-865b-44ce-b0c8-c713d9153995 service nova] Acquired lock "refresh_cache-62417973-075e-4128-8eb5-4c62946856e7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2045.865272] env[62510]: DEBUG nova.network.neutron [req-3465de72-b60d-4173-88b4-46e05b670b2c req-b603c99e-865b-44ce-b0c8-c713d9153995 service nova] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Refreshing network info cache for port 7afce004-8ac8-4715-a27d-7e5162c006ba {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2046.098868] env[62510]: DEBUG oslo_vmware.api [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769795, 'name': ReconfigVM_Task, 'duration_secs': 0.299334} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2046.099228] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Reconfigured VM instance instance-00000072 to detach disk 2001 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2046.103912] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-23e34f15-92c2-424f-af9a-db775a9dd5b3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.118552] env[62510]: DEBUG oslo_vmware.api [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 2046.118552] env[62510]: value = "task-1769797" [ 2046.118552] env[62510]: _type = "Task" [ 2046.118552] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2046.126176] env[62510]: DEBUG oslo_vmware.api [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769797, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2046.179386] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769796, 'name': CreateVM_Task, 'duration_secs': 0.385661} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2046.179542] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2046.180222] env[62510]: DEBUG oslo_concurrency.lockutils [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2046.180428] env[62510]: DEBUG oslo_concurrency.lockutils [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2046.180786] env[62510]: DEBUG oslo_concurrency.lockutils [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2046.180986] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-489ce6a9-cc45-407f-a8f3-c2abd0c5eb45 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.184978] env[62510]: DEBUG oslo_vmware.api [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2046.184978] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]524ff3cc-38a8-f822-2825-b72d44e745cf" [ 2046.184978] env[62510]: _type = "Task" [ 2046.184978] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2046.192097] env[62510]: DEBUG oslo_vmware.api [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]524ff3cc-38a8-f822-2825-b72d44e745cf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2046.277720] env[62510]: DEBUG oslo_concurrency.lockutils [None req-121d0b34-3edb-4884-8b54-0d10fd5698e1 tempest-ServerRescueTestJSONUnderV235-483296453 tempest-ServerRescueTestJSONUnderV235-483296453-project-member] Lock "f9dc3ae0-a004-4baf-a972-e4480774cc3f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.632s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2046.555563] env[62510]: DEBUG nova.network.neutron [req-3465de72-b60d-4173-88b4-46e05b670b2c req-b603c99e-865b-44ce-b0c8-c713d9153995 service nova] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Updated VIF entry in instance network info cache for port 7afce004-8ac8-4715-a27d-7e5162c006ba. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2046.555992] env[62510]: DEBUG nova.network.neutron [req-3465de72-b60d-4173-88b4-46e05b670b2c req-b603c99e-865b-44ce-b0c8-c713d9153995 service nova] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Updating instance_info_cache with network_info: [{"id": "7afce004-8ac8-4715-a27d-7e5162c006ba", "address": "fa:16:3e:b5:da:54", "network": {"id": "e49618de-aacc-4b42-8a2e-7e2dc945a3b1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-883053645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b13a257970e4a9a9f9cfecaaf37d9da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7afce004-8a", "ovs_interfaceid": "7afce004-8ac8-4715-a27d-7e5162c006ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2046.629237] env[62510]: DEBUG oslo_vmware.api [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769797, 'name': ReconfigVM_Task, 'duration_secs': 0.135407} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2046.629540] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367507', 'volume_id': 'adb312a1-8d6e-418b-86ab-664579515ac4', 'name': 'volume-adb312a1-8d6e-418b-86ab-664579515ac4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': '22002fc1-647e-4e65-a5f0-c3a34575985f', 'attached_at': '2024-12-11T19:45:28.000000', 'detached_at': '', 'volume_id': 'adb312a1-8d6e-418b-86ab-664579515ac4', 'serial': 'adb312a1-8d6e-418b-86ab-664579515ac4'} {{(pid=62510) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2046.629827] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2046.630849] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b67caf92-3d53-4db0-8779-f03cc41437bf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.637393] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2046.637611] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eccc0f24-d4ae-493f-a1c2-cc7e2dde96eb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.695280] env[62510]: DEBUG oslo_vmware.api [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]524ff3cc-38a8-f822-2825-b72d44e745cf, 'name': SearchDatastore_Task, 'duration_secs': 0.009626} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2046.695441] env[62510]: DEBUG oslo_concurrency.lockutils [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2046.695645] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2046.695868] env[62510]: DEBUG oslo_concurrency.lockutils [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2046.696023] env[62510]: DEBUG oslo_concurrency.lockutils [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2046.696205] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2046.696454] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9f5a265a-c43b-47eb-aef8-a378ab3d0a0a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.713166] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2046.713344] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2046.714034] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d18ba601-6cc4-4df8-9ec0-c0920ea09c33 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.719064] env[62510]: DEBUG oslo_vmware.api [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2046.719064] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5262d5fe-7693-c4fc-5aec-8ae7b6426a07" [ 2046.719064] env[62510]: _type = "Task" [ 2046.719064] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2046.726146] env[62510]: DEBUG oslo_vmware.api [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5262d5fe-7693-c4fc-5aec-8ae7b6426a07, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2046.851318] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2046.851672] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2046.851712] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Deleting the datastore file [datastore1] 22002fc1-647e-4e65-a5f0-c3a34575985f {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2046.851969] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3187a460-bb24-4010-bf81-028cdab8cd1b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.859087] env[62510]: DEBUG oslo_vmware.api [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 2046.859087] env[62510]: value = "task-1769799" [ 2046.859087] env[62510]: _type = "Task" [ 2046.859087] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2046.867166] env[62510]: DEBUG oslo_vmware.api [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769799, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2047.058708] env[62510]: DEBUG oslo_concurrency.lockutils [req-3465de72-b60d-4173-88b4-46e05b670b2c req-b603c99e-865b-44ce-b0c8-c713d9153995 service nova] Releasing lock "refresh_cache-62417973-075e-4128-8eb5-4c62946856e7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2047.230115] env[62510]: DEBUG oslo_vmware.api [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5262d5fe-7693-c4fc-5aec-8ae7b6426a07, 'name': SearchDatastore_Task, 'duration_secs': 0.009987} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2047.230878] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4658d49-4923-41fa-839a-164554a6cdb4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.235900] env[62510]: DEBUG oslo_vmware.api [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2047.235900] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]527c5808-1052-aed6-e6ed-2cc76f82889a" [ 2047.235900] env[62510]: _type = "Task" [ 2047.235900] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2047.243477] env[62510]: DEBUG oslo_vmware.api [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]527c5808-1052-aed6-e6ed-2cc76f82889a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2047.368983] env[62510]: DEBUG oslo_vmware.api [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769799, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.168047} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2047.369399] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2047.369648] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2047.369853] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2047.370099] env[62510]: INFO nova.compute.manager [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Took 2.39 seconds to destroy the instance on the hypervisor. [ 2047.370330] env[62510]: DEBUG oslo.service.loopingcall [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2047.370624] env[62510]: DEBUG nova.compute.manager [-] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2047.370734] env[62510]: DEBUG nova.network.neutron [-] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2047.747247] env[62510]: DEBUG oslo_vmware.api [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]527c5808-1052-aed6-e6ed-2cc76f82889a, 'name': SearchDatastore_Task, 'duration_secs': 0.010257} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2047.747567] env[62510]: DEBUG oslo_concurrency.lockutils [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2047.747880] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 62417973-075e-4128-8eb5-4c62946856e7/62417973-075e-4128-8eb5-4c62946856e7.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2047.748108] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0c7f68e5-5005-4a78-b045-05c50b64fb8f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.754691] env[62510]: DEBUG oslo_vmware.api [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2047.754691] env[62510]: value = "task-1769800" [ 2047.754691] env[62510]: _type = "Task" [ 2047.754691] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2047.763363] env[62510]: DEBUG oslo_vmware.api [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769800, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2048.239044] env[62510]: DEBUG nova.compute.manager [req-cfd050d0-7fb0-4fc0-a4f2-0a3d737c8410 req-ff951d6e-90b8-4e82-b869-1e97211837e2 service nova] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Received event network-vif-deleted-cc8e6d9b-23a8-4a82-bce2-858b46a9cf25 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2048.239044] env[62510]: INFO nova.compute.manager [req-cfd050d0-7fb0-4fc0-a4f2-0a3d737c8410 req-ff951d6e-90b8-4e82-b869-1e97211837e2 service nova] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Neutron deleted interface cc8e6d9b-23a8-4a82-bce2-858b46a9cf25; detaching it from the instance and deleting it from the info cache [ 2048.239044] env[62510]: DEBUG nova.network.neutron [req-cfd050d0-7fb0-4fc0-a4f2-0a3d737c8410 req-ff951d6e-90b8-4e82-b869-1e97211837e2 service nova] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2048.264904] env[62510]: DEBUG oslo_vmware.api [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769800, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2048.709496] env[62510]: DEBUG nova.network.neutron [-] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2048.744020] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-877799f5-33ad-4a16-b9d8-36531e4e1cff {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.753146] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31ed4851-c900-4a7c-91dc-4c280a5ef3c0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.774584] env[62510]: DEBUG oslo_vmware.api [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769800, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2048.786545] env[62510]: DEBUG nova.compute.manager [req-cfd050d0-7fb0-4fc0-a4f2-0a3d737c8410 req-ff951d6e-90b8-4e82-b869-1e97211837e2 service nova] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Detach interface failed, port_id=cc8e6d9b-23a8-4a82-bce2-858b46a9cf25, reason: Instance 22002fc1-647e-4e65-a5f0-c3a34575985f could not be found. {{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 2049.212088] env[62510]: INFO nova.compute.manager [-] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Took 1.84 seconds to deallocate network for instance. [ 2049.278351] env[62510]: DEBUG oslo_vmware.api [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769800, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2049.775802] env[62510]: DEBUG oslo_vmware.api [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769800, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.602794} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2049.776785] env[62510]: INFO nova.compute.manager [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Took 0.56 seconds to detach 1 volumes for instance. 
[ 2049.778432] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 62417973-075e-4128-8eb5-4c62946856e7/62417973-075e-4128-8eb5-4c62946856e7.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2049.778656] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2049.778914] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-90b8df4e-9379-4ed1-924f-e09730b2fe85 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.790172] env[62510]: DEBUG oslo_vmware.api [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2049.790172] env[62510]: value = "task-1769801" [ 2049.790172] env[62510]: _type = "Task" [ 2049.790172] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2049.798586] env[62510]: DEBUG oslo_vmware.api [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769801, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2050.286627] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2050.286978] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2050.287118] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2050.300150] env[62510]: DEBUG oslo_vmware.api [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769801, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063561} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2050.300457] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2050.301197] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c45d826-279b-4ac1-88a0-3e995fd38724 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.323417] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] 62417973-075e-4128-8eb5-4c62946856e7/62417973-075e-4128-8eb5-4c62946856e7.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2050.324500] env[62510]: INFO nova.scheduler.client.report [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Deleted allocations for instance 22002fc1-647e-4e65-a5f0-c3a34575985f [ 2050.328512] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fc21634d-e387-4adb-9bdf-d12f43323abe {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.351318] env[62510]: DEBUG oslo_vmware.api [None 
req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2050.351318] env[62510]: value = "task-1769802" [ 2050.351318] env[62510]: _type = "Task" [ 2050.351318] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2050.359680] env[62510]: DEBUG oslo_vmware.api [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769802, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2050.521860] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6aa04873-d614-4cc3-8bfd-fbb6946ee6e6 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquiring lock "31a181cd-b7cd-42c0-960d-e7d28987dc19" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2050.522106] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6aa04873-d614-4cc3-8bfd-fbb6946ee6e6 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "31a181cd-b7cd-42c0-960d-e7d28987dc19" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2050.832301] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4141c910-93e2-411c-9ad1-b78cae5ed059 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "22002fc1-647e-4e65-a5f0-c3a34575985f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.356s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2050.861746] env[62510]: DEBUG oslo_vmware.api [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769802, 'name': ReconfigVM_Task, 'duration_secs': 0.2908} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2050.862034] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Reconfigured VM instance instance-0000007b to attach disk [datastore1] 62417973-075e-4128-8eb5-4c62946856e7/62417973-075e-4128-8eb5-4c62946856e7.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2050.862634] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-06b403c1-bf8e-4089-9cd1-85692e676007 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.869882] env[62510]: DEBUG oslo_vmware.api [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2050.869882] env[62510]: value = "task-1769803" [ 2050.869882] env[62510]: _type = "Task" [ 2050.869882] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2050.877733] env[62510]: DEBUG oslo_vmware.api [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769803, 'name': Rename_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2051.025827] env[62510]: DEBUG nova.compute.utils [None req-6aa04873-d614-4cc3-8bfd-fbb6946ee6e6 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2051.380412] env[62510]: DEBUG oslo_vmware.api [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769803, 'name': Rename_Task, 'duration_secs': 0.196837} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2051.380712] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2051.380922] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c1553919-3b50-4390-bb62-2bbaf5e2097e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.387762] env[62510]: DEBUG oslo_vmware.api [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2051.387762] env[62510]: value = "task-1769804" [ 2051.387762] env[62510]: _type = "Task" [ 2051.387762] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2051.395972] env[62510]: DEBUG oslo_vmware.api [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769804, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2051.528761] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6aa04873-d614-4cc3-8bfd-fbb6946ee6e6 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "31a181cd-b7cd-42c0-960d-e7d28987dc19" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2051.642648] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "9f3f72ba-60c9-48fb-917f-197e6fc8faef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2051.642880] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "9f3f72ba-60c9-48fb-917f-197e6fc8faef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2051.855470] env[62510]: DEBUG oslo_concurrency.lockutils [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "f39e74c3-eb58-4d28-a489-73d2de1e9bef" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2051.855738] env[62510]: DEBUG oslo_concurrency.lockutils [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "f39e74c3-eb58-4d28-a489-73d2de1e9bef" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2051.855953] env[62510]: DEBUG oslo_concurrency.lockutils [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "f39e74c3-eb58-4d28-a489-73d2de1e9bef-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2051.856235] env[62510]: DEBUG oslo_concurrency.lockutils [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "f39e74c3-eb58-4d28-a489-73d2de1e9bef-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2051.856436] env[62510]: DEBUG oslo_concurrency.lockutils [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "f39e74c3-eb58-4d28-a489-73d2de1e9bef-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2051.858549] env[62510]: INFO nova.compute.manager [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Terminating instance [ 2051.898456] env[62510]: DEBUG oslo_vmware.api [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769804, 'name': PowerOnVM_Task, 'duration_secs': 0.449619} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2051.898705] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2051.898902] env[62510]: INFO nova.compute.manager [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Took 8.30 seconds to spawn the instance on the hypervisor. [ 2051.899107] env[62510]: DEBUG nova.compute.manager [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2051.899858] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95ac8807-f790-477e-92bd-7235e2055112 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.145470] env[62510]: DEBUG nova.compute.manager [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2052.362673] env[62510]: DEBUG nova.compute.manager [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2052.362908] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2052.363208] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7eb66316-d568-4cb3-8f54-d669e5758e22 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.371184] env[62510]: DEBUG oslo_vmware.api [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 2052.371184] env[62510]: value = "task-1769805" [ 2052.371184] env[62510]: _type = "Task" [ 2052.371184] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2052.379466] env[62510]: DEBUG oslo_vmware.api [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769805, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2052.416926] env[62510]: INFO nova.compute.manager [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Took 13.03 seconds to build instance. 
[ 2052.585846] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6aa04873-d614-4cc3-8bfd-fbb6946ee6e6 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquiring lock "31a181cd-b7cd-42c0-960d-e7d28987dc19" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2052.586133] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6aa04873-d614-4cc3-8bfd-fbb6946ee6e6 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "31a181cd-b7cd-42c0-960d-e7d28987dc19" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2052.586375] env[62510]: INFO nova.compute.manager [None req-6aa04873-d614-4cc3-8bfd-fbb6946ee6e6 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Attaching volume 12c8daa9-e51f-421a-aba5-226b43033367 to /dev/sdb [ 2052.619605] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc921076-a350-49fa-a58c-8fcb15965e7a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.626560] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e68fe3d8-3e11-475e-9886-3fbe238ee5f6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.640695] env[62510]: DEBUG nova.virt.block_device [None req-6aa04873-d614-4cc3-8bfd-fbb6946ee6e6 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Updating existing volume attachment record: deafe013-344b-4c47-982a-c37bd7498b61 {{(pid=62510) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2052.668560] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2052.668821] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2052.670448] env[62510]: INFO nova.compute.claims [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2052.881449] env[62510]: DEBUG oslo_vmware.api [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': 
task-1769805, 'name': PowerOffVM_Task, 'duration_secs': 0.210164} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2052.881819] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2052.882111] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Volume detach. Driver type: vmdk {{(pid=62510) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2052.882331] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367499', 'volume_id': '5f4e1cdd-b5a6-4d73-9faf-1b45c89d6de8', 'name': 'volume-5f4e1cdd-b5a6-4d73-9faf-1b45c89d6de8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': 'f39e74c3-eb58-4d28-a489-73d2de1e9bef', 'attached_at': '2024-12-11T19:45:04.000000', 'detached_at': '', 'volume_id': '5f4e1cdd-b5a6-4d73-9faf-1b45c89d6de8', 'serial': '5f4e1cdd-b5a6-4d73-9faf-1b45c89d6de8'} {{(pid=62510) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2052.883103] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7af993b6-0ef1-4838-bd6e-ec4cce687398 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.901535] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34a7a295-7dcb-4417-99f4-5368de1badc3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.908428] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5c8cbee-021d-4176-b52a-5afbee6f77ea {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.925928] env[62510]: DEBUG oslo_concurrency.lockutils [None req-149983fa-e8b8-4537-a530-de2e1d17c1bd tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "62417973-075e-4128-8eb5-4c62946856e7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.555s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2052.927789] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40e56b62-09ae-45e3-a893-278e71997a76 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.931285] env[62510]: DEBUG nova.compute.manager [req-8c743d7b-af65-4d43-8ce4-9daacd7ecb4b req-f5d0c6fc-8c42-442a-ac54-feee6371cb3a service nova] [instance: 
62417973-075e-4128-8eb5-4c62946856e7] Received event network-changed-7afce004-8ac8-4715-a27d-7e5162c006ba {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2052.931476] env[62510]: DEBUG nova.compute.manager [req-8c743d7b-af65-4d43-8ce4-9daacd7ecb4b req-f5d0c6fc-8c42-442a-ac54-feee6371cb3a service nova] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Refreshing instance network info cache due to event network-changed-7afce004-8ac8-4715-a27d-7e5162c006ba. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 2052.931686] env[62510]: DEBUG oslo_concurrency.lockutils [req-8c743d7b-af65-4d43-8ce4-9daacd7ecb4b req-f5d0c6fc-8c42-442a-ac54-feee6371cb3a service nova] Acquiring lock "refresh_cache-62417973-075e-4128-8eb5-4c62946856e7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2052.931827] env[62510]: DEBUG oslo_concurrency.lockutils [req-8c743d7b-af65-4d43-8ce4-9daacd7ecb4b req-f5d0c6fc-8c42-442a-ac54-feee6371cb3a service nova] Acquired lock "refresh_cache-62417973-075e-4128-8eb5-4c62946856e7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2052.933767] env[62510]: DEBUG nova.network.neutron [req-8c743d7b-af65-4d43-8ce4-9daacd7ecb4b req-f5d0c6fc-8c42-442a-ac54-feee6371cb3a service nova] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Refreshing network info cache for port 7afce004-8ac8-4715-a27d-7e5162c006ba {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2052.948385] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] The volume has not been displaced from its original location: [datastore1] volume-5f4e1cdd-b5a6-4d73-9faf-1b45c89d6de8/volume-5f4e1cdd-b5a6-4d73-9faf-1b45c89d6de8.vmdk. No consolidation needed. {{(pid=62510) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2052.953735] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Reconfiguring VM instance instance-00000077 to detach disk 2000 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2052.954638] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-52bc8958-e62e-46a4-9540-d2c4be220b4d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.974420] env[62510]: DEBUG oslo_vmware.api [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 2052.974420] env[62510]: value = "task-1769807" [ 2052.974420] env[62510]: _type = "Task" [ 2052.974420] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2052.982689] env[62510]: DEBUG oslo_vmware.api [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769807, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2053.484347] env[62510]: DEBUG oslo_vmware.api [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769807, 'name': ReconfigVM_Task, 'duration_secs': 0.165047} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2053.484672] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Reconfigured VM instance instance-00000077 to detach disk 2000 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2053.489572] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6edd0ae1-195e-4eb0-bc48-6c9919eaffc3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.507299] env[62510]: DEBUG oslo_vmware.api [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 2053.507299] env[62510]: value = "task-1769808" [ 2053.507299] env[62510]: _type = "Task" [ 2053.507299] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2053.514841] env[62510]: DEBUG oslo_vmware.api [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769808, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2053.649477] env[62510]: DEBUG nova.network.neutron [req-8c743d7b-af65-4d43-8ce4-9daacd7ecb4b req-f5d0c6fc-8c42-442a-ac54-feee6371cb3a service nova] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Updated VIF entry in instance network info cache for port 7afce004-8ac8-4715-a27d-7e5162c006ba. 
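The ReconfigVM_Task handling above illustrates the wait-for-task pattern that recurs throughout this log: a task is submitted, polled, its progress logged, and the call returns once vCenter reports success. The real loop lives in oslo_vmware (the wait_for_task/_poll_task frames in the traces); the snippet below is only a minimal, self-contained sketch of that polling pattern, with a stubbed task object standing in for the vCenter task reference.

    import time

    class FakeTask:
        """Stand-in for a vCenter task reference; the real driver reads TaskInfo over the SOAP API."""
        def __init__(self, progress_steps):
            self._steps = iter(progress_steps)   # e.g. [5, 100]

        def info(self):
            pct = next(self._steps)
            return {"state": "success" if pct >= 100 else "running", "progress": pct}

    def wait_for_task(task, poll_interval=0.5):
        """Poll `task` until it finishes, mirroring the 'progress is N%' lines above."""
        while True:
            info = task.info()
            if info["state"] == "success":
                print("task completed successfully")
                return info
            if info["state"] == "error":
                raise RuntimeError("task failed")
            print("task progress is %d%%" % info["progress"])
            time.sleep(poll_interval)

    wait_for_task(FakeTask([5, 100]), poll_interval=0.01)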
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2053.649691] env[62510]: DEBUG nova.network.neutron [req-8c743d7b-af65-4d43-8ce4-9daacd7ecb4b req-f5d0c6fc-8c42-442a-ac54-feee6371cb3a service nova] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Updating instance_info_cache with network_info: [{"id": "7afce004-8ac8-4715-a27d-7e5162c006ba", "address": "fa:16:3e:b5:da:54", "network": {"id": "e49618de-aacc-4b42-8a2e-7e2dc945a3b1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-883053645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b13a257970e4a9a9f9cfecaaf37d9da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7afce004-8a", "ovs_interfaceid": "7afce004-8ac8-4715-a27d-7e5162c006ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2053.767523] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4fb9b04-d0c3-4a85-9428-8498895f989d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.775091] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28c6ed76-fb42-4bfc-ad8b-0d30b6c88692 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.805996] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e32cef20-95d7-4657-af43-6975646189e5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.812846] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-322eacce-14f8-4e70-ab5f-2f63d66255f8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.826715] env[62510]: DEBUG nova.compute.provider_tree [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2054.016559] env[62510]: DEBUG oslo_vmware.api [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769808, 'name': ReconfigVM_Task, 'duration_secs': 0.21391} completed successfully. 
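The instance_info_cache entry above is a list of VIF dictionaries carrying the network, subnet, fixed-IP and floating-IP details for each port. As a quick illustration of that structure, the sketch below (an abridged copy of the cache entry logged above for port 7afce004-8ac8-4715-a27d-7e5162c006ba) pulls out the addresses:

    # Abridged copy of the network_info entry logged above.
    network_info = [{
        "id": "7afce004-8ac8-4715-a27d-7e5162c006ba",
        "address": "fa:16:3e:b5:da:54",
        "network": {
            "label": "tempest-ServerActionsTestJSON-883053645-network",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{
                    "address": "192.168.128.12",
                    "type": "fixed",
                    "floating_ips": [{"address": "10.180.180.246", "type": "floating"}],
                }],
            }],
        },
    }]

    def list_addresses(network_info):
        """Yield (port_id, fixed_ip, floating_ips) tuples from a cached network_info list."""
        for vif in network_info:
            for subnet in vif["network"]["subnets"]:
                for ip in subnet["ips"]:
                    floats = [f["address"] for f in ip.get("floating_ips", [])]
                    yield vif["id"], ip["address"], floats

    for port_id, fixed, floats in list_addresses(network_info):
        print(port_id, fixed, floats)   # 7afce004-... 192.168.128.12 ['10.180.180.246']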
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2054.016858] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367499', 'volume_id': '5f4e1cdd-b5a6-4d73-9faf-1b45c89d6de8', 'name': 'volume-5f4e1cdd-b5a6-4d73-9faf-1b45c89d6de8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': 'f39e74c3-eb58-4d28-a489-73d2de1e9bef', 'attached_at': '2024-12-11T19:45:04.000000', 'detached_at': '', 'volume_id': '5f4e1cdd-b5a6-4d73-9faf-1b45c89d6de8', 'serial': '5f4e1cdd-b5a6-4d73-9faf-1b45c89d6de8'} {{(pid=62510) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2054.017151] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2054.017914] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60ef1541-09db-4722-9e89-f1bea313bb75 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.024786] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2054.025011] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8a8e1051-a469-4ab2-b758-ab66227f70a2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.106505] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2054.106730] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2054.106909] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Deleting the datastore file [datastore1] f39e74c3-eb58-4d28-a489-73d2de1e9bef {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2054.107221] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-48c38835-6fd5-4285-b96b-d9fd013db72e {{(pid=62510) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.115154] env[62510]: DEBUG oslo_vmware.api [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 2054.115154] env[62510]: value = "task-1769810" [ 2054.115154] env[62510]: _type = "Task" [ 2054.115154] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2054.122599] env[62510]: DEBUG oslo_vmware.api [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769810, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2054.153280] env[62510]: DEBUG oslo_concurrency.lockutils [req-8c743d7b-af65-4d43-8ce4-9daacd7ecb4b req-f5d0c6fc-8c42-442a-ac54-feee6371cb3a service nova] Releasing lock "refresh_cache-62417973-075e-4128-8eb5-4c62946856e7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2054.330709] env[62510]: DEBUG nova.scheduler.client.report [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2054.625395] env[62510]: DEBUG oslo_vmware.api [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769810, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.098978} completed successfully. 
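The "Inventory has not changed" report above carries the resource provider's full inventory. Usable capacity per resource class follows from total, reserved and allocation_ratio; assuming the usual placement formula capacity = (total - reserved) * allocation_ratio, the figures reported above work out as shown below.

    # Inventory as reported above for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56.
    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        capacity = int((inv["total"] - inv["reserved"]) * inv["allocation_ratio"])
        print(f"{rc}: schedulable capacity {capacity}")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400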
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2054.625800] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2054.625853] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2054.625987] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2054.626184] env[62510]: INFO nova.compute.manager [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Took 2.26 seconds to destroy the instance on the hypervisor. [ 2054.626430] env[62510]: DEBUG oslo.service.loopingcall [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2054.626619] env[62510]: DEBUG nova.compute.manager [-] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2054.626715] env[62510]: DEBUG nova.network.neutron [-] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2054.837016] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.168s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2054.837543] env[62510]: DEBUG nova.compute.manager [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Start building networks asynchronously for instance. 
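The loopingcall entry above waits on _deallocate_network_with_retries, i.e. network teardown is retried rather than attempted once. Nova drives this through oslo.service's looping-call machinery; the snippet below is only a generic retry-with-backoff sketch of the idea, with a hypothetical deallocation stub, not Nova's actual helper.

    import time

    def call_with_retries(func, max_attempts=3, initial_delay=0.5, backoff=2.0):
        """Retry `func` with exponential backoff (simplified stand-in for the
        looping-call-driven retries referenced above)."""
        delay = initial_delay
        for attempt in range(1, max_attempts + 1):
            try:
                return func()
            except Exception as exc:
                if attempt == max_attempts:
                    raise
                print(f"attempt {attempt} failed ({exc}); retrying in {delay:.2f}s")
                time.sleep(delay)
                delay *= backoff

    # Hypothetical deallocation stub that fails once before succeeding.
    state = {"calls": 0}
    def deallocate_network_stub():
        state["calls"] += 1
        if state["calls"] < 2:
            raise RuntimeError("neutron temporarily unavailable")
        return "deallocated"

    print(call_with_retries(deallocate_network_stub, initial_delay=0.01))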
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2055.098147] env[62510]: DEBUG nova.compute.manager [req-3d3d4270-f772-48e9-a1c7-4b63e127ffc6 req-dad48660-f720-4256-b32c-2285539d3551 service nova] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Received event network-vif-deleted-453df25e-58eb-42b3-aa0a-3771b21d6b25 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2055.098358] env[62510]: INFO nova.compute.manager [req-3d3d4270-f772-48e9-a1c7-4b63e127ffc6 req-dad48660-f720-4256-b32c-2285539d3551 service nova] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Neutron deleted interface 453df25e-58eb-42b3-aa0a-3771b21d6b25; detaching it from the instance and deleting it from the info cache [ 2055.098533] env[62510]: DEBUG nova.network.neutron [req-3d3d4270-f772-48e9-a1c7-4b63e127ffc6 req-dad48660-f720-4256-b32c-2285539d3551 service nova] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2055.342373] env[62510]: DEBUG nova.compute.utils [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2055.344101] env[62510]: DEBUG nova.compute.manager [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2055.344284] env[62510]: DEBUG nova.network.neutron [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2055.383506] env[62510]: DEBUG nova.policy [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '80f05c3e00b84277b4401aa98a253692', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bae4f0adee8c4c28add1849316448538', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 2055.565334] env[62510]: DEBUG nova.network.neutron [-] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2055.601547] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-877c6950-565d-4d34-bf28-6c8e334e7513 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.613747] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-b0187fef-871f-49b3-8075-a2af851adb21 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.645563] env[62510]: DEBUG nova.compute.manager [req-3d3d4270-f772-48e9-a1c7-4b63e127ffc6 req-dad48660-f720-4256-b32c-2285539d3551 service nova] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Detach interface failed, port_id=453df25e-58eb-42b3-aa0a-3771b21d6b25, reason: Instance f39e74c3-eb58-4d28-a489-73d2de1e9bef could not be found. {{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 2055.646940] env[62510]: DEBUG nova.network.neutron [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Successfully created port: 47759f10-ede2-4020-b8a8-36effea384c5 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2055.847061] env[62510]: DEBUG nova.compute.manager [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2056.067594] env[62510]: INFO nova.compute.manager [-] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Took 1.44 seconds to deallocate network for instance. [ 2056.615960] env[62510]: INFO nova.compute.manager [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Took 0.55 seconds to detach 1 volumes for instance. [ 2056.619725] env[62510]: DEBUG nova.compute.manager [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Deleting volume: 5f4e1cdd-b5a6-4d73-9faf-1b45c89d6de8 {{(pid=62510) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 2056.856538] env[62510]: DEBUG nova.compute.manager [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2056.887071] env[62510]: DEBUG nova.virt.hardware [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2056.887356] env[62510]: DEBUG nova.virt.hardware [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2056.887539] env[62510]: DEBUG nova.virt.hardware [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2056.887735] env[62510]: DEBUG nova.virt.hardware [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2056.887895] env[62510]: DEBUG nova.virt.hardware [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2056.888065] env[62510]: DEBUG nova.virt.hardware [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2056.888412] env[62510]: DEBUG nova.virt.hardware [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2056.888517] env[62510]: DEBUG nova.virt.hardware [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2056.888696] env[62510]: DEBUG 
nova.virt.hardware [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2056.888889] env[62510]: DEBUG nova.virt.hardware [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2056.889085] env[62510]: DEBUG nova.virt.hardware [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2056.890014] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5723c3b5-cffc-4bd2-a277-63fc9786ae13 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.898031] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dff216e1-e7d2-482b-b056-3fec5a34b0a8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.129912] env[62510]: DEBUG nova.compute.manager [req-586eb783-a260-4cdd-95ac-20f0e34ca504 req-9ca22007-3380-4077-bd2c-4730fa0c56b7 service nova] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Received event network-vif-plugged-47759f10-ede2-4020-b8a8-36effea384c5 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2057.130407] env[62510]: DEBUG oslo_concurrency.lockutils [req-586eb783-a260-4cdd-95ac-20f0e34ca504 req-9ca22007-3380-4077-bd2c-4730fa0c56b7 service nova] Acquiring lock "9f3f72ba-60c9-48fb-917f-197e6fc8faef-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2057.130616] env[62510]: DEBUG oslo_concurrency.lockutils [req-586eb783-a260-4cdd-95ac-20f0e34ca504 req-9ca22007-3380-4077-bd2c-4730fa0c56b7 service nova] Lock "9f3f72ba-60c9-48fb-917f-197e6fc8faef-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2057.130872] env[62510]: DEBUG oslo_concurrency.lockutils [req-586eb783-a260-4cdd-95ac-20f0e34ca504 req-9ca22007-3380-4077-bd2c-4730fa0c56b7 service nova] Lock "9f3f72ba-60c9-48fb-917f-197e6fc8faef-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2057.131229] env[62510]: DEBUG nova.compute.manager [req-586eb783-a260-4cdd-95ac-20f0e34ca504 req-9ca22007-3380-4077-bd2c-4730fa0c56b7 service nova] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] No waiting events found dispatching network-vif-plugged-47759f10-ede2-4020-b8a8-36effea384c5 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2057.131516] env[62510]: WARNING nova.compute.manager 
[req-586eb783-a260-4cdd-95ac-20f0e34ca504 req-9ca22007-3380-4077-bd2c-4730fa0c56b7 service nova] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Received unexpected event network-vif-plugged-47759f10-ede2-4020-b8a8-36effea384c5 for instance with vm_state building and task_state spawning. [ 2057.132591] env[62510]: DEBUG nova.network.neutron [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Successfully updated port: 47759f10-ede2-4020-b8a8-36effea384c5 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2057.172219] env[62510]: DEBUG oslo_concurrency.lockutils [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2057.172465] env[62510]: DEBUG oslo_concurrency.lockutils [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2057.172649] env[62510]: DEBUG oslo_concurrency.lockutils [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2057.189558] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-6aa04873-d614-4cc3-8bfd-fbb6946ee6e6 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Volume attach. 
Driver type: vmdk {{(pid=62510) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2057.189770] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-6aa04873-d614-4cc3-8bfd-fbb6946ee6e6 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367514', 'volume_id': '12c8daa9-e51f-421a-aba5-226b43033367', 'name': 'volume-12c8daa9-e51f-421a-aba5-226b43033367', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '31a181cd-b7cd-42c0-960d-e7d28987dc19', 'attached_at': '', 'detached_at': '', 'volume_id': '12c8daa9-e51f-421a-aba5-226b43033367', 'serial': '12c8daa9-e51f-421a-aba5-226b43033367'} {{(pid=62510) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2057.190740] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d6811f8-0a38-4bdb-84df-665cff1d11b1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.194707] env[62510]: INFO nova.scheduler.client.report [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Deleted allocations for instance f39e74c3-eb58-4d28-a489-73d2de1e9bef [ 2057.209108] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44ae90d7-5ec9-4883-8258-bd7b9c5dcc64 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.235270] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-6aa04873-d614-4cc3-8bfd-fbb6946ee6e6 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Reconfiguring VM instance instance-00000078 to attach disk [datastore1] volume-12c8daa9-e51f-421a-aba5-226b43033367/volume-12c8daa9-e51f-421a-aba5-226b43033367.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2057.235540] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7d198bb7-4efd-47e9-9403-4be455e46525 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.254050] env[62510]: DEBUG oslo_vmware.api [None req-6aa04873-d614-4cc3-8bfd-fbb6946ee6e6 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 2057.254050] env[62510]: value = "task-1769813" [ 2057.254050] env[62510]: _type = "Task" [ 2057.254050] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2057.261963] env[62510]: DEBUG oslo_vmware.api [None req-6aa04873-d614-4cc3-8bfd-fbb6946ee6e6 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769813, 'name': ReconfigVM_Task} progress is 5%. 
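The ReconfigVM_Task being polled here attaches the Cinder volume's VMDK to instance-00000078 as a thin-provisioned disk, as described a few lines up. The reconfigure call boils down to a device-change spec sent to vCenter; the plain-Python structure below mirrors the fields involved. The real driver builds the corresponding SOAP objects through oslo.vmware, so the attribute names, controller key and unit number here are illustrative assumptions; only the backing file and the thin-provisioning flag come from the log above.

    # Illustrative shape of an "attach existing VMDK as a thin disk" device change.
    attach_disk_change = {
        "operation": "add",
        "device": {
            "type": "VirtualDisk",
            "controller_key": 1000,        # assumed SCSI controller key
            "unit_number": 1,              # assumed free unit on that controller
            "backing": {
                "type": "VirtualDiskFlatVer2BackingInfo",
                "file_name": "[datastore1] volume-12c8daa9-e51f-421a-aba5-226b43033367/"
                             "volume-12c8daa9-e51f-421a-aba5-226b43033367.vmdk",
                "disk_mode": "persistent",
                "thin_provisioned": True,  # matches "with type thin" above
            },
        },
    }

    config_spec = {"device_change": [attach_disk_change]}
    print(config_spec["device_change"][0]["backing"]["file_name"])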
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2057.636043] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "refresh_cache-9f3f72ba-60c9-48fb-917f-197e6fc8faef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2057.636043] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquired lock "refresh_cache-9f3f72ba-60c9-48fb-917f-197e6fc8faef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2057.636261] env[62510]: DEBUG nova.network.neutron [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2057.714702] env[62510]: DEBUG oslo_concurrency.lockutils [None req-120a02de-c0dc-46f5-97ab-01c33956ffeb tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "f39e74c3-eb58-4d28-a489-73d2de1e9bef" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.859s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2057.764266] env[62510]: DEBUG oslo_vmware.api [None req-6aa04873-d614-4cc3-8bfd-fbb6946ee6e6 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769813, 'name': ReconfigVM_Task, 'duration_secs': 0.393063} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2057.764630] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-6aa04873-d614-4cc3-8bfd-fbb6946ee6e6 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Reconfigured VM instance instance-00000078 to attach disk [datastore1] volume-12c8daa9-e51f-421a-aba5-226b43033367/volume-12c8daa9-e51f-421a-aba5-226b43033367.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2057.770054] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-efbd2a79-2591-4019-b3f8-d57b754863e6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.785529] env[62510]: DEBUG oslo_vmware.api [None req-6aa04873-d614-4cc3-8bfd-fbb6946ee6e6 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 2057.785529] env[62510]: value = "task-1769814" [ 2057.785529] env[62510]: _type = "Task" [ 2057.785529] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2057.793049] env[62510]: DEBUG oslo_vmware.api [None req-6aa04873-d614-4cc3-8bfd-fbb6946ee6e6 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769814, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2058.168078] env[62510]: DEBUG nova.network.neutron [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2058.295463] env[62510]: DEBUG oslo_vmware.api [None req-6aa04873-d614-4cc3-8bfd-fbb6946ee6e6 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769814, 'name': ReconfigVM_Task, 'duration_secs': 0.134076} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2058.296358] env[62510]: DEBUG nova.network.neutron [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Updating instance_info_cache with network_info: [{"id": "47759f10-ede2-4020-b8a8-36effea384c5", "address": "fa:16:3e:e0:5c:8d", "network": {"id": "4c55d05c-607e-4972-898f-4aacefeddfdb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1391357384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bae4f0adee8c4c28add1849316448538", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dced2f3d-7fd3-4a42-836d-9f02dab4c949", "external-id": "nsx-vlan-transportzone-117", "segmentation_id": 117, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47759f10-ed", "ovs_interfaceid": "47759f10-ede2-4020-b8a8-36effea384c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2058.297660] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-6aa04873-d614-4cc3-8bfd-fbb6946ee6e6 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367514', 'volume_id': '12c8daa9-e51f-421a-aba5-226b43033367', 'name': 'volume-12c8daa9-e51f-421a-aba5-226b43033367', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '31a181cd-b7cd-42c0-960d-e7d28987dc19', 'attached_at': '', 'detached_at': '', 'volume_id': '12c8daa9-e51f-421a-aba5-226b43033367', 'serial': 
'12c8daa9-e51f-421a-aba5-226b43033367'} {{(pid=62510) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2058.567063] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f56bb6af-c50a-4827-987a-efaa7d98f7f6 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "fe3b3380-69bb-4563-abf2-9f0db439d31a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2058.567063] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f56bb6af-c50a-4827-987a-efaa7d98f7f6 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "fe3b3380-69bb-4563-abf2-9f0db439d31a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2058.567295] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f56bb6af-c50a-4827-987a-efaa7d98f7f6 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "fe3b3380-69bb-4563-abf2-9f0db439d31a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2058.567436] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f56bb6af-c50a-4827-987a-efaa7d98f7f6 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "fe3b3380-69bb-4563-abf2-9f0db439d31a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2058.567606] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f56bb6af-c50a-4827-987a-efaa7d98f7f6 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "fe3b3380-69bb-4563-abf2-9f0db439d31a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2058.569742] env[62510]: INFO nova.compute.manager [None req-f56bb6af-c50a-4827-987a-efaa7d98f7f6 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Terminating instance [ 2058.800540] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Releasing lock "refresh_cache-9f3f72ba-60c9-48fb-917f-197e6fc8faef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2058.800864] env[62510]: DEBUG nova.compute.manager [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Instance network_info: |[{"id": "47759f10-ede2-4020-b8a8-36effea384c5", "address": "fa:16:3e:e0:5c:8d", "network": {"id": "4c55d05c-607e-4972-898f-4aacefeddfdb", "bridge": "br-int", "label": 
"tempest-ServerActionsTestOtherB-1391357384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bae4f0adee8c4c28add1849316448538", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dced2f3d-7fd3-4a42-836d-9f02dab4c949", "external-id": "nsx-vlan-transportzone-117", "segmentation_id": 117, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47759f10-ed", "ovs_interfaceid": "47759f10-ede2-4020-b8a8-36effea384c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2058.803371] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e0:5c:8d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dced2f3d-7fd3-4a42-836d-9f02dab4c949', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '47759f10-ede2-4020-b8a8-36effea384c5', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2058.811062] env[62510]: DEBUG oslo.service.loopingcall [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2058.811484] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2058.811710] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-16be645b-81da-4de5-8129-17b9b3b15827 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.832648] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2058.832648] env[62510]: value = "task-1769815" [ 2058.832648] env[62510]: _type = "Task" [ 2058.832648] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2058.841870] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769815, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2059.073455] env[62510]: DEBUG nova.compute.manager [None req-f56bb6af-c50a-4827-987a-efaa7d98f7f6 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2059.073695] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f56bb6af-c50a-4827-987a-efaa7d98f7f6 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2059.074597] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62b499b5-5f8f-4c47-97dd-823d9eea34ce {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.082107] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-f56bb6af-c50a-4827-987a-efaa7d98f7f6 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2059.082343] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-57ea2a9c-bc04-4756-b18d-293f1d65da66 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.088731] env[62510]: DEBUG oslo_vmware.api [None req-f56bb6af-c50a-4827-987a-efaa7d98f7f6 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 2059.088731] env[62510]: value = "task-1769816" [ 2059.088731] env[62510]: _type = "Task" [ 2059.088731] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2059.097746] env[62510]: DEBUG oslo_vmware.api [None req-f56bb6af-c50a-4827-987a-efaa7d98f7f6 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769816, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2059.155599] env[62510]: DEBUG nova.compute.manager [req-a13c26fd-0835-45fd-a4af-0a582ff6f622 req-048a8437-ca7d-4929-aa4d-8718905938c3 service nova] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Received event network-changed-47759f10-ede2-4020-b8a8-36effea384c5 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2059.155915] env[62510]: DEBUG nova.compute.manager [req-a13c26fd-0835-45fd-a4af-0a582ff6f622 req-048a8437-ca7d-4929-aa4d-8718905938c3 service nova] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Refreshing instance network info cache due to event network-changed-47759f10-ede2-4020-b8a8-36effea384c5. 
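The "Instance VIF info" entry logged a little earlier for port 47759f10-ede2-4020-b8a8-36effea384c5 is what the vmwareapi driver hands to VM creation: the Neutron port id and MAC, an opaque NSX logical-switch reference, and a vmxnet3 adapter model. The helper below just reassembles that same structure from the logged values; it is illustrative only, while the real builder lives in the driver code referenced in the vmops.py frames above.

    def build_vif_info(port_id, mac, switch_id, vif_model="vmxnet3"):
        """Assemble a VIF-info dict in the shape logged above (illustrative only)."""
        return {
            "network_name": "br-int",
            "mac_address": mac,
            "network_ref": {
                "type": "OpaqueNetwork",
                "network-id": switch_id,
                "network-type": "nsx.LogicalSwitch",
                "use-external-id": True,
            },
            "iface_id": port_id,
            "vif_model": vif_model,
        }

    vif_info = build_vif_info(
        port_id="47759f10-ede2-4020-b8a8-36effea384c5",
        mac="fa:16:3e:e0:5c:8d",
        switch_id="dced2f3d-7fd3-4a42-836d-9f02dab4c949",
    )
    print(vif_info["network_ref"]["network-type"])   # nsx.LogicalSwitch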
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 2059.156200] env[62510]: DEBUG oslo_concurrency.lockutils [req-a13c26fd-0835-45fd-a4af-0a582ff6f622 req-048a8437-ca7d-4929-aa4d-8718905938c3 service nova] Acquiring lock "refresh_cache-9f3f72ba-60c9-48fb-917f-197e6fc8faef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2059.156396] env[62510]: DEBUG oslo_concurrency.lockutils [req-a13c26fd-0835-45fd-a4af-0a582ff6f622 req-048a8437-ca7d-4929-aa4d-8718905938c3 service nova] Acquired lock "refresh_cache-9f3f72ba-60c9-48fb-917f-197e6fc8faef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2059.156574] env[62510]: DEBUG nova.network.neutron [req-a13c26fd-0835-45fd-a4af-0a582ff6f622 req-048a8437-ca7d-4929-aa4d-8718905938c3 service nova] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Refreshing network info cache for port 47759f10-ede2-4020-b8a8-36effea384c5 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2059.342466] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769815, 'name': CreateVM_Task, 'duration_secs': 0.311417} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2059.342847] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2059.343300] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2059.343465] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2059.343835] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2059.344109] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ef686e0-cf56-4e1f-a4ef-bdb8153cb66b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.346127] env[62510]: DEBUG nova.objects.instance [None req-6aa04873-d614-4cc3-8bfd-fbb6946ee6e6 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lazy-loading 'flavor' on Instance uuid 31a181cd-b7cd-42c0-960d-e7d28987dc19 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2059.350448] env[62510]: DEBUG oslo_vmware.api [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e 
tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 2059.350448] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52c88306-28df-4c66-12c5-ff79fc7f3756" [ 2059.350448] env[62510]: _type = "Task" [ 2059.350448] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2059.358871] env[62510]: DEBUG oslo_vmware.api [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52c88306-28df-4c66-12c5-ff79fc7f3756, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2059.600358] env[62510]: DEBUG oslo_vmware.api [None req-f56bb6af-c50a-4827-987a-efaa7d98f7f6 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769816, 'name': PowerOffVM_Task, 'duration_secs': 0.237692} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2059.600683] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-f56bb6af-c50a-4827-987a-efaa7d98f7f6 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2059.600928] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f56bb6af-c50a-4827-987a-efaa7d98f7f6 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2059.601226] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-63f486b1-3610-417b-ac33-2b0c3fc0249d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.681201] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f56bb6af-c50a-4827-987a-efaa7d98f7f6 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2059.681473] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f56bb6af-c50a-4827-987a-efaa7d98f7f6 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2059.681637] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-f56bb6af-c50a-4827-987a-efaa7d98f7f6 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Deleting the datastore file [datastore1] fe3b3380-69bb-4563-abf2-9f0db439d31a {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2059.681906] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-32cffc8d-674a-466c-8db1-2cc14de1b8d8 
{{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.689885] env[62510]: DEBUG oslo_vmware.api [None req-f56bb6af-c50a-4827-987a-efaa7d98f7f6 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 2059.689885] env[62510]: value = "task-1769818" [ 2059.689885] env[62510]: _type = "Task" [ 2059.689885] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2059.697732] env[62510]: DEBUG oslo_vmware.api [None req-f56bb6af-c50a-4827-987a-efaa7d98f7f6 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769818, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2059.850739] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6aa04873-d614-4cc3-8bfd-fbb6946ee6e6 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "31a181cd-b7cd-42c0-960d-e7d28987dc19" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.265s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2059.862146] env[62510]: DEBUG oslo_vmware.api [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52c88306-28df-4c66-12c5-ff79fc7f3756, 'name': SearchDatastore_Task, 'duration_secs': 0.009835} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2059.863064] env[62510]: DEBUG nova.network.neutron [req-a13c26fd-0835-45fd-a4af-0a582ff6f622 req-048a8437-ca7d-4929-aa4d-8718905938c3 service nova] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Updated VIF entry in instance network info cache for port 47759f10-ede2-4020-b8a8-36effea384c5. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2059.863490] env[62510]: DEBUG nova.network.neutron [req-a13c26fd-0835-45fd-a4af-0a582ff6f622 req-048a8437-ca7d-4929-aa4d-8718905938c3 service nova] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Updating instance_info_cache with network_info: [{"id": "47759f10-ede2-4020-b8a8-36effea384c5", "address": "fa:16:3e:e0:5c:8d", "network": {"id": "4c55d05c-607e-4972-898f-4aacefeddfdb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1391357384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bae4f0adee8c4c28add1849316448538", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dced2f3d-7fd3-4a42-836d-9f02dab4c949", "external-id": "nsx-vlan-transportzone-117", "segmentation_id": 117, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47759f10-ed", "ovs_interfaceid": "47759f10-ede2-4020-b8a8-36effea384c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2059.864637] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2059.864871] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2059.865149] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2059.865256] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2059.865518] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Creating directory with 
path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2059.865870] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-acdf32cc-8a97-48a1-b367-213db152a317 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.873941] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2059.874128] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2059.875030] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4ce4794-ebb4-4d82-8d41-a140b6c527db {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.880562] env[62510]: DEBUG oslo_vmware.api [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 2059.880562] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52513e1b-20b0-0ee9-3b07-0bba7cb43d84" [ 2059.880562] env[62510]: _type = "Task" [ 2059.880562] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2059.888853] env[62510]: DEBUG oslo_vmware.api [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52513e1b-20b0-0ee9-3b07-0bba7cb43d84, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.201317] env[62510]: DEBUG oslo_vmware.api [None req-f56bb6af-c50a-4827-987a-efaa7d98f7f6 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769818, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143853} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2060.201574] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-f56bb6af-c50a-4827-987a-efaa7d98f7f6 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2060.201758] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f56bb6af-c50a-4827-987a-efaa7d98f7f6 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2060.201933] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f56bb6af-c50a-4827-987a-efaa7d98f7f6 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2060.202123] env[62510]: INFO nova.compute.manager [None req-f56bb6af-c50a-4827-987a-efaa7d98f7f6 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Took 1.13 seconds to destroy the instance on the hypervisor. [ 2060.202364] env[62510]: DEBUG oslo.service.loopingcall [None req-f56bb6af-c50a-4827-987a-efaa7d98f7f6 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2060.202555] env[62510]: DEBUG nova.compute.manager [-] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2060.202650] env[62510]: DEBUG nova.network.neutron [-] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2060.367131] env[62510]: DEBUG oslo_concurrency.lockutils [req-a13c26fd-0835-45fd-a4af-0a582ff6f622 req-048a8437-ca7d-4929-aa4d-8718905938c3 service nova] Releasing lock "refresh_cache-9f3f72ba-60c9-48fb-917f-197e6fc8faef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2060.391443] env[62510]: DEBUG oslo_vmware.api [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52513e1b-20b0-0ee9-3b07-0bba7cb43d84, 'name': SearchDatastore_Task, 'duration_secs': 0.008404} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2060.392214] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30fe3dd4-7eba-4b6b-a4b4-2e91f0ab22da {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.397295] env[62510]: DEBUG oslo_vmware.api [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 2060.397295] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52f26141-f923-6f54-af62-e36e3bcc53a0" [ 2060.397295] env[62510]: _type = "Task" [ 2060.397295] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2060.404770] env[62510]: DEBUG oslo_vmware.api [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52f26141-f923-6f54-af62-e36e3bcc53a0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.453820] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d29bcd0e-78a4-412d-a449-382f060d1150 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquiring lock "31a181cd-b7cd-42c0-960d-e7d28987dc19" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2060.454037] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d29bcd0e-78a4-412d-a449-382f060d1150 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "31a181cd-b7cd-42c0-960d-e7d28987dc19" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2060.907618] env[62510]: DEBUG oslo_vmware.api [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52f26141-f923-6f54-af62-e36e3bcc53a0, 'name': SearchDatastore_Task, 'duration_secs': 0.035412} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2060.907869] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2060.908139] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 9f3f72ba-60c9-48fb-917f-197e6fc8faef/9f3f72ba-60c9-48fb-917f-197e6fc8faef.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2060.908393] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-44d67e9f-c4ff-490d-a798-49d414e7e304 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.915985] env[62510]: DEBUG oslo_vmware.api [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 2060.915985] env[62510]: value = "task-1769819" [ 2060.915985] env[62510]: _type = "Task" [ 2060.915985] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2060.923468] env[62510]: DEBUG oslo_vmware.api [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769819, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.935061] env[62510]: DEBUG nova.network.neutron [-] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2060.956399] env[62510]: INFO nova.compute.manager [None req-d29bcd0e-78a4-412d-a449-382f060d1150 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Detaching volume 12c8daa9-e51f-421a-aba5-226b43033367 [ 2060.993547] env[62510]: INFO nova.virt.block_device [None req-d29bcd0e-78a4-412d-a449-382f060d1150 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Attempting to driver detach volume 12c8daa9-e51f-421a-aba5-226b43033367 from mountpoint /dev/sdb [ 2060.993851] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-d29bcd0e-78a4-412d-a449-382f060d1150 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Volume detach. 
Driver type: vmdk {{(pid=62510) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2060.993970] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-d29bcd0e-78a4-412d-a449-382f060d1150 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367514', 'volume_id': '12c8daa9-e51f-421a-aba5-226b43033367', 'name': 'volume-12c8daa9-e51f-421a-aba5-226b43033367', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '31a181cd-b7cd-42c0-960d-e7d28987dc19', 'attached_at': '', 'detached_at': '', 'volume_id': '12c8daa9-e51f-421a-aba5-226b43033367', 'serial': '12c8daa9-e51f-421a-aba5-226b43033367'} {{(pid=62510) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2060.994861] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-348ec7a2-e230-4443-88f5-e735e389e97d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.016946] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9f0f4f3-c1be-40e4-bef2-b0e5c96dd723 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.024411] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aee652c-3f3f-404e-ba91-2a728fc02841 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.044377] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eb4e2f5-eb87-4e07-ba73-a0d90479c517 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.060646] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-d29bcd0e-78a4-412d-a449-382f060d1150 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] The volume has not been displaced from its original location: [datastore1] volume-12c8daa9-e51f-421a-aba5-226b43033367/volume-12c8daa9-e51f-421a-aba5-226b43033367.vmdk. No consolidation needed. 
{{(pid=62510) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2061.065859] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-d29bcd0e-78a4-412d-a449-382f060d1150 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Reconfiguring VM instance instance-00000078 to detach disk 2001 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2061.066185] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-002683a3-e55a-4846-8e8a-90f361cf4370 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.085157] env[62510]: DEBUG oslo_vmware.api [None req-d29bcd0e-78a4-412d-a449-382f060d1150 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 2061.085157] env[62510]: value = "task-1769820" [ 2061.085157] env[62510]: _type = "Task" [ 2061.085157] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2061.092888] env[62510]: DEBUG oslo_vmware.api [None req-d29bcd0e-78a4-412d-a449-382f060d1150 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769820, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.189795] env[62510]: DEBUG nova.compute.manager [req-36894902-b4de-44ea-9124-b253bc20cdd8 req-bfcc6985-c5bb-4b43-9c71-f7ccd735e82d service nova] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Received event network-vif-deleted-405ea0bb-7824-446f-8b19-9d455a30b449 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2061.428521] env[62510]: DEBUG oslo_vmware.api [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769819, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.438070] env[62510]: INFO nova.compute.manager [-] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Took 1.23 seconds to deallocate network for instance. [ 2061.595232] env[62510]: DEBUG oslo_vmware.api [None req-d29bcd0e-78a4-412d-a449-382f060d1150 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769820, 'name': ReconfigVM_Task, 'duration_secs': 0.255061} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2061.595532] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-d29bcd0e-78a4-412d-a449-382f060d1150 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Reconfigured VM instance instance-00000078 to detach disk 2001 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2061.600428] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2901d7dd-5fa6-4495-8044-afb0d20d4bd1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.616767] env[62510]: DEBUG oslo_vmware.api [None req-d29bcd0e-78a4-412d-a449-382f060d1150 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 2061.616767] env[62510]: value = "task-1769821" [ 2061.616767] env[62510]: _type = "Task" [ 2061.616767] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2061.624939] env[62510]: DEBUG oslo_vmware.api [None req-d29bcd0e-78a4-412d-a449-382f060d1150 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769821, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.926979] env[62510]: DEBUG oslo_vmware.api [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769819, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.599408} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2061.927205] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 9f3f72ba-60c9-48fb-917f-197e6fc8faef/9f3f72ba-60c9-48fb-917f-197e6fc8faef.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2061.927449] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2061.927701] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fe6714fb-5d1c-4c90-9e83-723c2bbc8050 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.934482] env[62510]: DEBUG oslo_vmware.api [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 2061.934482] env[62510]: value = "task-1769822" [ 2061.934482] env[62510]: _type = "Task" [ 2061.934482] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2061.941949] env[62510]: DEBUG oslo_vmware.api [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769822, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.943959] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f56bb6af-c50a-4827-987a-efaa7d98f7f6 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2061.944244] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f56bb6af-c50a-4827-987a-efaa7d98f7f6 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2061.944484] env[62510]: DEBUG nova.objects.instance [None req-f56bb6af-c50a-4827-987a-efaa7d98f7f6 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lazy-loading 'resources' on Instance uuid fe3b3380-69bb-4563-abf2-9f0db439d31a {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2062.126294] env[62510]: DEBUG oslo_vmware.api [None req-d29bcd0e-78a4-412d-a449-382f060d1150 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769821, 'name': ReconfigVM_Task, 'duration_secs': 0.136619} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2062.126607] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-d29bcd0e-78a4-412d-a449-382f060d1150 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367514', 'volume_id': '12c8daa9-e51f-421a-aba5-226b43033367', 'name': 'volume-12c8daa9-e51f-421a-aba5-226b43033367', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '31a181cd-b7cd-42c0-960d-e7d28987dc19', 'attached_at': '', 'detached_at': '', 'volume_id': '12c8daa9-e51f-421a-aba5-226b43033367', 'serial': '12c8daa9-e51f-421a-aba5-226b43033367'} {{(pid=62510) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2062.444690] env[62510]: DEBUG oslo_vmware.api [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769822, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059789} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2062.444994] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2062.445705] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce3e6703-7f22-4d81-bf39-b2df4e615d55 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.470288] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] 9f3f72ba-60c9-48fb-917f-197e6fc8faef/9f3f72ba-60c9-48fb-917f-197e6fc8faef.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2062.472989] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8e399d74-7ed8-4684-b9cd-78f021ebaa76 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.492145] env[62510]: DEBUG oslo_vmware.api [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 2062.492145] env[62510]: value = "task-1769823" [ 2062.492145] env[62510]: _type = "Task" [ 2062.492145] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2062.501138] env[62510]: DEBUG oslo_vmware.api [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769823, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2062.546042] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd5aa5d8-dad5-4f64-9b2a-d8510dd2555c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.552934] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aac1020-0e0a-4bdd-8de6-869507ee3d57 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.583224] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00334569-de46-4890-92e4-133f9db3ffd7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.589837] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db3d250e-dcbd-4679-8443-2b0fec1f2c5c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.602476] env[62510]: DEBUG nova.compute.provider_tree [None req-f56bb6af-c50a-4827-987a-efaa7d98f7f6 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2062.667799] env[62510]: DEBUG nova.objects.instance [None req-d29bcd0e-78a4-412d-a449-382f060d1150 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lazy-loading 'flavor' on Instance uuid 31a181cd-b7cd-42c0-960d-e7d28987dc19 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2063.004008] env[62510]: DEBUG oslo_vmware.api [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769823, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2063.106136] env[62510]: DEBUG nova.scheduler.client.report [None req-f56bb6af-c50a-4827-987a-efaa7d98f7f6 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2063.503210] env[62510]: DEBUG oslo_vmware.api [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769823, 'name': ReconfigVM_Task, 'duration_secs': 0.756099} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2063.503534] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Reconfigured VM instance instance-0000007c to attach disk [datastore1] 9f3f72ba-60c9-48fb-917f-197e6fc8faef/9f3f72ba-60c9-48fb-917f-197e6fc8faef.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2063.504127] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-73d39dbb-f121-4683-b593-58964488bb67 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.510710] env[62510]: DEBUG oslo_vmware.api [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 2063.510710] env[62510]: value = "task-1769824" [ 2063.510710] env[62510]: _type = "Task" [ 2063.510710] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2063.519128] env[62510]: DEBUG oslo_vmware.api [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769824, 'name': Rename_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2063.611703] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f56bb6af-c50a-4827-987a-efaa7d98f7f6 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.667s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2063.641851] env[62510]: INFO nova.scheduler.client.report [None req-f56bb6af-c50a-4827-987a-efaa7d98f7f6 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Deleted allocations for instance fe3b3380-69bb-4563-abf2-9f0db439d31a [ 2063.674196] env[62510]: DEBUG oslo_concurrency.lockutils [None req-d29bcd0e-78a4-412d-a449-382f060d1150 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "31a181cd-b7cd-42c0-960d-e7d28987dc19" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.220s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2064.021188] env[62510]: DEBUG oslo_vmware.api [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769824, 'name': Rename_Task, 'duration_secs': 0.144214} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2064.021442] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2064.021630] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2be04424-b194-4734-a3e6-b868e4b1d60c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.027675] env[62510]: DEBUG oslo_vmware.api [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 2064.027675] env[62510]: value = "task-1769825" [ 2064.027675] env[62510]: _type = "Task" [ 2064.027675] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2064.034848] env[62510]: DEBUG oslo_vmware.api [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769825, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2064.150556] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f56bb6af-c50a-4827-987a-efaa7d98f7f6 tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "fe3b3380-69bb-4563-abf2-9f0db439d31a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.583s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2064.538186] env[62510]: DEBUG oslo_vmware.api [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769825, 'name': PowerOnVM_Task, 'duration_secs': 0.420969} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2064.538535] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2064.538705] env[62510]: INFO nova.compute.manager [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Took 7.68 seconds to spawn the instance on the hypervisor. 
[ 2064.538815] env[62510]: DEBUG nova.compute.manager [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2064.539599] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6f00db9-1f35-4e3a-999b-055142c4d855 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.694695] env[62510]: DEBUG oslo_concurrency.lockutils [None req-18d01955-a587-41ad-a4a7-c5a8adc92411 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquiring lock "31a181cd-b7cd-42c0-960d-e7d28987dc19" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2064.695196] env[62510]: DEBUG oslo_concurrency.lockutils [None req-18d01955-a587-41ad-a4a7-c5a8adc92411 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "31a181cd-b7cd-42c0-960d-e7d28987dc19" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2064.695531] env[62510]: DEBUG oslo_concurrency.lockutils [None req-18d01955-a587-41ad-a4a7-c5a8adc92411 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquiring lock "31a181cd-b7cd-42c0-960d-e7d28987dc19-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2064.695771] env[62510]: DEBUG oslo_concurrency.lockutils [None req-18d01955-a587-41ad-a4a7-c5a8adc92411 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "31a181cd-b7cd-42c0-960d-e7d28987dc19-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2064.695969] env[62510]: DEBUG oslo_concurrency.lockutils [None req-18d01955-a587-41ad-a4a7-c5a8adc92411 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "31a181cd-b7cd-42c0-960d-e7d28987dc19-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2064.698307] env[62510]: INFO nova.compute.manager [None req-18d01955-a587-41ad-a4a7-c5a8adc92411 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Terminating instance [ 2065.057104] env[62510]: INFO nova.compute.manager [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Took 12.41 seconds to build instance. 
[ 2065.202605] env[62510]: DEBUG nova.compute.manager [None req-18d01955-a587-41ad-a4a7-c5a8adc92411 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2065.202827] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-18d01955-a587-41ad-a4a7-c5a8adc92411 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2065.204099] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a137457a-d72d-4c0f-bad5-2a3459e26e37 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.211983] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-18d01955-a587-41ad-a4a7-c5a8adc92411 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2065.212231] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-26ecdcd7-5e9c-47b1-b174-6123904a9839 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.218342] env[62510]: DEBUG oslo_vmware.api [None req-18d01955-a587-41ad-a4a7-c5a8adc92411 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 2065.218342] env[62510]: value = "task-1769827" [ 2065.218342] env[62510]: _type = "Task" [ 2065.218342] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2065.226877] env[62510]: DEBUG oslo_vmware.api [None req-18d01955-a587-41ad-a4a7-c5a8adc92411 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769827, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2065.266677] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2065.266935] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2065.267136] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Starting heal instance info cache {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 2065.559445] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0ce96582-f7c5-4485-980f-5eb6f6c0121e tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "9f3f72ba-60c9-48fb-917f-197e6fc8faef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.916s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2065.727400] env[62510]: DEBUG oslo_vmware.api [None req-18d01955-a587-41ad-a4a7-c5a8adc92411 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769827, 'name': PowerOffVM_Task, 'duration_secs': 0.19293} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2065.727676] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-18d01955-a587-41ad-a4a7-c5a8adc92411 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2065.727847] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-18d01955-a587-41ad-a4a7-c5a8adc92411 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2065.728118] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ce345940-a69f-4a3a-b568-ac9699cd461d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.819769] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-18d01955-a587-41ad-a4a7-c5a8adc92411 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2065.819942] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-18d01955-a587-41ad-a4a7-c5a8adc92411 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2065.820108] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-18d01955-a587-41ad-a4a7-c5a8adc92411 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Deleting the datastore file [datastore1] 31a181cd-b7cd-42c0-960d-e7d28987dc19 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2065.820377] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-833fcb57-2e2c-4e3f-aa55-384d4aeb97c6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.828150] env[62510]: DEBUG oslo_vmware.api [None req-18d01955-a587-41ad-a4a7-c5a8adc92411 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 2065.828150] env[62510]: value = "task-1769829" [ 2065.828150] env[62510]: _type = "Task" [ 2065.828150] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2065.837285] env[62510]: DEBUG oslo_vmware.api [None req-18d01955-a587-41ad-a4a7-c5a8adc92411 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769829, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.200108] env[62510]: DEBUG nova.compute.manager [req-3a1a2b7a-57fb-4d27-99aa-e887ca2c1b36 req-810c1108-35fa-49fb-88e4-aa3c0e4ac8d0 service nova] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Received event network-changed-47759f10-ede2-4020-b8a8-36effea384c5 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2066.200334] env[62510]: DEBUG nova.compute.manager [req-3a1a2b7a-57fb-4d27-99aa-e887ca2c1b36 req-810c1108-35fa-49fb-88e4-aa3c0e4ac8d0 service nova] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Refreshing instance network info cache due to event network-changed-47759f10-ede2-4020-b8a8-36effea384c5. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 2066.200816] env[62510]: DEBUG oslo_concurrency.lockutils [req-3a1a2b7a-57fb-4d27-99aa-e887ca2c1b36 req-810c1108-35fa-49fb-88e4-aa3c0e4ac8d0 service nova] Acquiring lock "refresh_cache-9f3f72ba-60c9-48fb-917f-197e6fc8faef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2066.200816] env[62510]: DEBUG oslo_concurrency.lockutils [req-3a1a2b7a-57fb-4d27-99aa-e887ca2c1b36 req-810c1108-35fa-49fb-88e4-aa3c0e4ac8d0 service nova] Acquired lock "refresh_cache-9f3f72ba-60c9-48fb-917f-197e6fc8faef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2066.200931] env[62510]: DEBUG nova.network.neutron [req-3a1a2b7a-57fb-4d27-99aa-e887ca2c1b36 req-810c1108-35fa-49fb-88e4-aa3c0e4ac8d0 service nova] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Refreshing network info cache for port 47759f10-ede2-4020-b8a8-36effea384c5 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2066.338244] env[62510]: DEBUG oslo_vmware.api [None req-18d01955-a587-41ad-a4a7-c5a8adc92411 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769829, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.189665} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2066.338496] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-18d01955-a587-41ad-a4a7-c5a8adc92411 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2066.338680] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-18d01955-a587-41ad-a4a7-c5a8adc92411 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2066.338864] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-18d01955-a587-41ad-a4a7-c5a8adc92411 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2066.339056] env[62510]: INFO nova.compute.manager [None req-18d01955-a587-41ad-a4a7-c5a8adc92411 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Took 1.14 seconds to destroy the instance on the hypervisor. [ 2066.339351] env[62510]: DEBUG oslo.service.loopingcall [None req-18d01955-a587-41ad-a4a7-c5a8adc92411 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2066.339556] env[62510]: DEBUG nova.compute.manager [-] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2066.339661] env[62510]: DEBUG nova.network.neutron [-] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2066.859093] env[62510]: DEBUG nova.compute.manager [req-a4963d75-b377-48cd-bc12-6fc5d12666b1 req-19de09ab-5e80-400e-8352-04fea62dd231 service nova] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Received event network-vif-deleted-6ae6f8a4-f91b-4f3f-b94e-a75ba935075c {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2066.859371] env[62510]: INFO nova.compute.manager [req-a4963d75-b377-48cd-bc12-6fc5d12666b1 req-19de09ab-5e80-400e-8352-04fea62dd231 service nova] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Neutron deleted interface 6ae6f8a4-f91b-4f3f-b94e-a75ba935075c; detaching it from the instance and deleting it from the info cache [ 2066.859528] env[62510]: DEBUG nova.network.neutron [req-a4963d75-b377-48cd-bc12-6fc5d12666b1 req-19de09ab-5e80-400e-8352-04fea62dd231 service nova] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2066.954611] env[62510]: DEBUG nova.network.neutron [req-3a1a2b7a-57fb-4d27-99aa-e887ca2c1b36 req-810c1108-35fa-49fb-88e4-aa3c0e4ac8d0 service nova] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Updated VIF entry in instance network info cache for port 47759f10-ede2-4020-b8a8-36effea384c5. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2066.955035] env[62510]: DEBUG nova.network.neutron [req-3a1a2b7a-57fb-4d27-99aa-e887ca2c1b36 req-810c1108-35fa-49fb-88e4-aa3c0e4ac8d0 service nova] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Updating instance_info_cache with network_info: [{"id": "47759f10-ede2-4020-b8a8-36effea384c5", "address": "fa:16:3e:e0:5c:8d", "network": {"id": "4c55d05c-607e-4972-898f-4aacefeddfdb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1391357384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bae4f0adee8c4c28add1849316448538", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dced2f3d-7fd3-4a42-836d-9f02dab4c949", "external-id": "nsx-vlan-transportzone-117", "segmentation_id": 117, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47759f10-ed", "ovs_interfaceid": "47759f10-ede2-4020-b8a8-36effea384c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2067.334972] env[62510]: DEBUG nova.network.neutron [-] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2067.362606] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7d37b0a6-5d3e-4cfb-9206-366d0d7ce8e5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.372089] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e4e262a-44e0-41d1-9189-1cead16ded66 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.399490] env[62510]: DEBUG nova.compute.manager [req-a4963d75-b377-48cd-bc12-6fc5d12666b1 req-19de09ab-5e80-400e-8352-04fea62dd231 service nova] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Detach interface failed, port_id=6ae6f8a4-f91b-4f3f-b94e-a75ba935075c, reason: Instance 31a181cd-b7cd-42c0-960d-e7d28987dc19 could not be found. {{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 2067.457769] env[62510]: DEBUG oslo_concurrency.lockutils [req-3a1a2b7a-57fb-4d27-99aa-e887ca2c1b36 req-810c1108-35fa-49fb-88e4-aa3c0e4ac8d0 service nova] Releasing lock "refresh_cache-9f3f72ba-60c9-48fb-917f-197e6fc8faef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2067.837694] env[62510]: INFO nova.compute.manager [-] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Took 1.50 seconds to deallocate network for instance. 
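The cache refresh above logs the complete network_info entry held for port 47759f10-ede2-4020-b8a8-36effea384c5, including its fixed address 192.168.128.11 and floating address 10.180.180.213. A minimal sketch, in plain Python over a dict shaped like that logged entry (the helper name is illustrative and not Nova code), of pulling the addresses out of one cached VIF:

def addresses_from_cached_vif(cached_vif):
    """Return (fixed_ips, floating_ips) for one cached VIF entry.

    cached_vif is assumed to be one element of the instance_info_cache
    network_info list, already parsed into Python dicts and shaped like
    the entry logged above.
    """
    fixed, floating = [], []
    for subnet in cached_vif.get("network", {}).get("subnets", []):
        for ip in subnet.get("ips", []):
            fixed.append(ip["address"])
            floating.extend(fip["address"] for fip in ip.get("floating_ips", []))
    return fixed, floating

# For the entry logged above this would yield
# (['192.168.128.11'], ['10.180.180.213']).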
[ 2068.254939] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f86c28c8-7e29-4cf3-b070-7d366a78821b tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "4e735bb6-f167-4c2b-b44e-d2dd3040603d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2068.255319] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f86c28c8-7e29-4cf3-b070-7d366a78821b tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "4e735bb6-f167-4c2b-b44e-d2dd3040603d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2068.255383] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f86c28c8-7e29-4cf3-b070-7d366a78821b tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "4e735bb6-f167-4c2b-b44e-d2dd3040603d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2068.255523] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f86c28c8-7e29-4cf3-b070-7d366a78821b tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "4e735bb6-f167-4c2b-b44e-d2dd3040603d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2068.255698] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f86c28c8-7e29-4cf3-b070-7d366a78821b tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "4e735bb6-f167-4c2b-b44e-d2dd3040603d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2068.257891] env[62510]: INFO nova.compute.manager [None req-f86c28c8-7e29-4cf3-b070-7d366a78821b tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Terminating instance [ 2068.344239] env[62510]: DEBUG oslo_concurrency.lockutils [None req-18d01955-a587-41ad-a4a7-c5a8adc92411 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2068.344487] env[62510]: DEBUG oslo_concurrency.lockutils [None req-18d01955-a587-41ad-a4a7-c5a8adc92411 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2068.344702] env[62510]: DEBUG nova.objects.instance [None req-18d01955-a587-41ad-a4a7-c5a8adc92411 
tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lazy-loading 'resources' on Instance uuid 31a181cd-b7cd-42c0-960d-e7d28987dc19 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2068.762287] env[62510]: DEBUG nova.compute.manager [None req-f86c28c8-7e29-4cf3-b070-7d366a78821b tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2068.762287] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f86c28c8-7e29-4cf3-b070-7d366a78821b tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2068.762783] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0db06a6-ab5e-48f8-8623-c8edf9275b00 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.770628] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-f86c28c8-7e29-4cf3-b070-7d366a78821b tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2068.770843] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8aad07b5-7542-4076-b17a-a60feb35737d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.777072] env[62510]: DEBUG oslo_vmware.api [None req-f86c28c8-7e29-4cf3-b070-7d366a78821b tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 2068.777072] env[62510]: value = "task-1769830" [ 2068.777072] env[62510]: _type = "Task" [ 2068.777072] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2068.785158] env[62510]: DEBUG oslo_vmware.api [None req-f86c28c8-7e29-4cf3-b070-7d366a78821b tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769830, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2068.922115] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0871484-8653-4506-9408-8e3bfb461336 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.930202] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-514fdadf-c0a9-482f-9cca-6e1b635b03d4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.962109] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3014297-5ef6-4dd1-9d6d-5632273cd434 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.969802] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caf01149-1861-41bf-bf3e-20149f356c82 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.983157] env[62510]: DEBUG nova.compute.provider_tree [None req-18d01955-a587-41ad-a4a7-c5a8adc92411 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2069.179486] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c78debfd-6139-4c64-98e7-421d41949ade tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Acquiring lock "94dd7eee-f799-4fb5-854b-e7d59621b125" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2069.179731] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c78debfd-6139-4c64-98e7-421d41949ade tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lock "94dd7eee-f799-4fb5-854b-e7d59621b125" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2069.291240] env[62510]: DEBUG oslo_vmware.api [None req-f86c28c8-7e29-4cf3-b070-7d366a78821b tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769830, 'name': PowerOffVM_Task, 'duration_secs': 0.255054} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2069.291646] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-f86c28c8-7e29-4cf3-b070-7d366a78821b tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2069.291835] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f86c28c8-7e29-4cf3-b070-7d366a78821b tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2069.292139] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d05a48a1-eb34-4844-b344-74da5e09c3a5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.486270] env[62510]: DEBUG nova.scheduler.client.report [None req-18d01955-a587-41ad-a4a7-c5a8adc92411 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2069.682677] env[62510]: DEBUG nova.compute.utils [None req-c78debfd-6139-4c64-98e7-421d41949ade tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2069.732882] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f86c28c8-7e29-4cf3-b070-7d366a78821b tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2069.733128] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f86c28c8-7e29-4cf3-b070-7d366a78821b tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2069.733333] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-f86c28c8-7e29-4cf3-b070-7d366a78821b tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Deleting the datastore file [datastore1] 4e735bb6-f167-4c2b-b44e-d2dd3040603d {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2069.733580] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-99996ead-3f9d-4357-b803-bcfc746d41fd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.740451] 
env[62510]: DEBUG oslo_vmware.api [None req-f86c28c8-7e29-4cf3-b070-7d366a78821b tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for the task: (returnval){ [ 2069.740451] env[62510]: value = "task-1769832" [ 2069.740451] env[62510]: _type = "Task" [ 2069.740451] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2069.748064] env[62510]: DEBUG oslo_vmware.api [None req-f86c28c8-7e29-4cf3-b070-7d366a78821b tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769832, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2069.991332] env[62510]: DEBUG oslo_concurrency.lockutils [None req-18d01955-a587-41ad-a4a7-c5a8adc92411 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.647s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2070.009970] env[62510]: INFO nova.scheduler.client.report [None req-18d01955-a587-41ad-a4a7-c5a8adc92411 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Deleted allocations for instance 31a181cd-b7cd-42c0-960d-e7d28987dc19 [ 2070.186060] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c78debfd-6139-4c64-98e7-421d41949ade tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lock "94dd7eee-f799-4fb5-854b-e7d59621b125" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2070.250157] env[62510]: DEBUG oslo_vmware.api [None req-f86c28c8-7e29-4cf3-b070-7d366a78821b tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Task: {'id': task-1769832, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.234171} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2070.250393] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-f86c28c8-7e29-4cf3-b070-7d366a78821b tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2070.250536] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f86c28c8-7e29-4cf3-b070-7d366a78821b tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2070.250716] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-f86c28c8-7e29-4cf3-b070-7d366a78821b tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2070.250925] env[62510]: INFO nova.compute.manager [None req-f86c28c8-7e29-4cf3-b070-7d366a78821b tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Took 1.49 seconds to destroy the instance on the hypervisor. [ 2070.251183] env[62510]: DEBUG oslo.service.loopingcall [None req-f86c28c8-7e29-4cf3-b070-7d366a78821b tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2070.251371] env[62510]: DEBUG nova.compute.manager [-] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2070.251469] env[62510]: DEBUG nova.network.neutron [-] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2070.484670] env[62510]: DEBUG nova.compute.manager [req-feddf82d-8c86-42d7-a190-7382766791e0 req-f3803718-efa1-4061-821b-6bd8dfadddec service nova] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Received event network-vif-deleted-a6e31bab-0459-42fe-8756-d37cc3fa3e88 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2070.484947] env[62510]: INFO nova.compute.manager [req-feddf82d-8c86-42d7-a190-7382766791e0 req-f3803718-efa1-4061-821b-6bd8dfadddec service nova] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Neutron deleted interface a6e31bab-0459-42fe-8756-d37cc3fa3e88; detaching it from the instance and deleting it from the info cache [ 2070.485054] env[62510]: DEBUG nova.network.neutron [req-feddf82d-8c86-42d7-a190-7382766791e0 req-f3803718-efa1-4061-821b-6bd8dfadddec service nova] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2070.516711] env[62510]: DEBUG oslo_concurrency.lockutils [None req-18d01955-a587-41ad-a4a7-c5a8adc92411 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "31a181cd-b7cd-42c0-960d-e7d28987dc19" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.822s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2070.800305] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Didn't find any instances for network info cache update. 
{{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10395}} [ 2070.800550] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2070.800750] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2070.800916] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2070.801079] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2070.801227] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2070.801376] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2070.801508] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62510) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 2070.801654] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2070.960940] env[62510]: DEBUG nova.network.neutron [-] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2070.988155] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-081e3c87-0106-4173-91d3-e21f5c0e49eb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.997695] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f86cf05f-0c59-47cc-a675-d7b80b3d593f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.024146] env[62510]: DEBUG nova.compute.manager [req-feddf82d-8c86-42d7-a190-7382766791e0 req-f3803718-efa1-4061-821b-6bd8dfadddec service nova] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Detach interface failed, port_id=a6e31bab-0459-42fe-8756-d37cc3fa3e88, reason: Instance 4e735bb6-f167-4c2b-b44e-d2dd3040603d could not be found. 
{{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 2071.245679] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c78debfd-6139-4c64-98e7-421d41949ade tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Acquiring lock "94dd7eee-f799-4fb5-854b-e7d59621b125" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2071.246017] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c78debfd-6139-4c64-98e7-421d41949ade tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lock "94dd7eee-f799-4fb5-854b-e7d59621b125" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2071.246267] env[62510]: INFO nova.compute.manager [None req-c78debfd-6139-4c64-98e7-421d41949ade tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Attaching volume 06655831-5c13-4da1-904f-7991fa27b95c to /dev/sdb [ 2071.278736] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dab20ec4-763a-4084-bd20-aaca11b03b3a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.286525] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f313cdd-96c1-4060-a084-c5a194dae04d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.300488] env[62510]: DEBUG nova.virt.block_device [None req-c78debfd-6139-4c64-98e7-421d41949ade tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Updating existing volume attachment record: d06bb55d-d864-4aa3-b83e-49005ada9d37 {{(pid=62510) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2071.304372] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2071.304602] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2071.304798] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2071.306838] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62510) update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2071.306838] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92ffef64-0c2f-4cb7-8b67-beb7904f077f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.313360] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fab412a5-fd2a-4459-92d5-5bccdeb7ca88 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.330541] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41d17a44-f87d-455e-8f3d-f90e2e303f4d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.338462] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57bf5ab4-b5f0-473e-95f3-9ce8d7ed3ee0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.369496] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180397MB free_disk=166GB free_vcpus=48 pci_devices=None {{(pid=62510) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2071.369687] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2071.369895] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2071.464028] env[62510]: INFO nova.compute.manager [-] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Took 1.21 seconds to deallocate network for instance. [ 2071.970115] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f86c28c8-7e29-4cf3-b070-7d366a78821b tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2072.394648] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 4e735bb6-f167-4c2b-b44e-d2dd3040603d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2072.394849] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 94dd7eee-f799-4fb5-854b-e7d59621b125 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2072.395014] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 62417973-075e-4128-8eb5-4c62946856e7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2072.395328] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 9f3f72ba-60c9-48fb-917f-197e6fc8faef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2072.685636] env[62510]: DEBUG oslo_concurrency.lockutils [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquiring lock "ef20eba1-cb3a-4e0f-bbdb-54949e409546" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2072.685883] env[62510]: DEBUG oslo_concurrency.lockutils [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "ef20eba1-cb3a-4e0f-bbdb-54949e409546" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2072.897796] env[62510]: INFO nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance ef20eba1-cb3a-4e0f-bbdb-54949e409546 has allocations against this compute host but is not found in the database. 
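The repeated 'Acquiring lock ... / Lock ... acquired ... :: waited / ... "released" ... :: held' triplets in the surrounding records (for example around "compute_resources") are emitted by oslo.concurrency's lock helpers. A minimal sketch, assuming only the public oslo_concurrency.lockutils API (the function names and bodies here are illustrative, not Nova's actual methods), of the two usage forms whose logging matches these lines:

from oslo_concurrency import lockutils

@lockutils.synchronized("compute_resources")
def audit_resources(tracker_state):
    # Runs with the named lock held; the synchronized wrapper logs the
    # 'acquired ... :: waited Ns' and '"released" ... :: held Ns' lines
    # seen in the records above.
    tracker_state["audits"] = tracker_state.get("audits", 0) + 1

def refresh_cache(instance_uuid, cache):
    # Context-manager form; the plain Acquiring/Acquired/Releasing lock
    # lines without a 'by "..."' target (e.g. the per-instance
    # "refresh_cache-<uuid>" locks earlier in this log) match this style.
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        cache.setdefault(instance_uuid, {})["refreshed"] = True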
[ 2072.898017] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2072.898183] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2072.972974] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c06188cc-424d-4ea3-920e-6aa2a6b63264 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.980755] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc146784-a49f-4550-91d6-757102634527 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.012718] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d20e758f-da26-4ab5-95e4-8b3385a62a73 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.020219] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6d25988-fbe7-4be6-af1e-99423181d384 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.033157] env[62510]: DEBUG nova.compute.provider_tree [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2073.188012] env[62510]: DEBUG nova.compute.manager [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2073.535936] env[62510]: DEBUG nova.scheduler.client.report [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2073.706617] env[62510]: DEBUG oslo_concurrency.lockutils [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2074.042033] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62510) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2074.042033] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.672s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2074.042033] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f86c28c8-7e29-4cf3-b070-7d366a78821b tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.072s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2074.042033] env[62510]: DEBUG nova.objects.instance [None req-f86c28c8-7e29-4cf3-b070-7d366a78821b tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lazy-loading 'resources' on Instance uuid 4e735bb6-f167-4c2b-b44e-d2dd3040603d {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2074.605079] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7347e79c-9b02-4556-a469-2ef158120330 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.613095] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eee8707-f3f2-4d39-b1d8-6f4cb64f8cf8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.643860] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2f31a7a-b16f-4b1e-a23b-420929b99f93 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.650902] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-60e6d957-e18c-4407-9a97-3847b0b35ef6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.663451] env[62510]: DEBUG nova.compute.provider_tree [None req-f86c28c8-7e29-4cf3-b070-7d366a78821b tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2075.166879] env[62510]: DEBUG nova.scheduler.client.report [None req-f86c28c8-7e29-4cf3-b070-7d366a78821b tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2075.672234] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f86c28c8-7e29-4cf3-b070-7d366a78821b tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.629s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2075.673658] env[62510]: DEBUG oslo_concurrency.lockutils [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.967s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2075.675583] env[62510]: INFO nova.compute.claims [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2075.692232] env[62510]: INFO nova.scheduler.client.report [None req-f86c28c8-7e29-4cf3-b070-7d366a78821b tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Deleted allocations for instance 4e735bb6-f167-4c2b-b44e-d2dd3040603d [ 2075.849078] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-c78debfd-6139-4c64-98e7-421d41949ade tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Volume attach. 
Driver type: vmdk {{(pid=62510) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2075.849327] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-c78debfd-6139-4c64-98e7-421d41949ade tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367516', 'volume_id': '06655831-5c13-4da1-904f-7991fa27b95c', 'name': 'volume-06655831-5c13-4da1-904f-7991fa27b95c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '94dd7eee-f799-4fb5-854b-e7d59621b125', 'attached_at': '', 'detached_at': '', 'volume_id': '06655831-5c13-4da1-904f-7991fa27b95c', 'serial': '06655831-5c13-4da1-904f-7991fa27b95c'} {{(pid=62510) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2075.850223] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60a5f336-bc14-4bce-995f-5ff05441261b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.866558] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fb42e44-60cf-4a11-9e80-0d910151c5f8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.891535] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-c78debfd-6139-4c64-98e7-421d41949ade tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] volume-06655831-5c13-4da1-904f-7991fa27b95c/volume-06655831-5c13-4da1-904f-7991fa27b95c.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2075.891763] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3c5addba-c22a-4d10-a095-5ea2d328f377 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.912046] env[62510]: DEBUG oslo_vmware.api [None req-c78debfd-6139-4c64-98e7-421d41949ade tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Waiting for the task: (returnval){ [ 2075.912046] env[62510]: value = "task-1769835" [ 2075.912046] env[62510]: _type = "Task" [ 2075.912046] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2075.921792] env[62510]: DEBUG oslo_vmware.api [None req-c78debfd-6139-4c64-98e7-421d41949ade tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769835, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2076.199184] env[62510]: DEBUG oslo_concurrency.lockutils [None req-f86c28c8-7e29-4cf3-b070-7d366a78821b tempest-ServerActionsTestOtherA-606697029 tempest-ServerActionsTestOtherA-606697029-project-member] Lock "4e735bb6-f167-4c2b-b44e-d2dd3040603d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.944s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2076.421572] env[62510]: DEBUG oslo_vmware.api [None req-c78debfd-6139-4c64-98e7-421d41949ade tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769835, 'name': ReconfigVM_Task, 'duration_secs': 0.338874} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2076.421780] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-c78debfd-6139-4c64-98e7-421d41949ade tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Reconfigured VM instance instance-0000007a to attach disk [datastore1] volume-06655831-5c13-4da1-904f-7991fa27b95c/volume-06655831-5c13-4da1-904f-7991fa27b95c.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2076.426509] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d46fe2c-2c2a-40ba-bd7a-43e5d8395429 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.441350] env[62510]: DEBUG oslo_vmware.api [None req-c78debfd-6139-4c64-98e7-421d41949ade tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Waiting for the task: (returnval){ [ 2076.441350] env[62510]: value = "task-1769836" [ 2076.441350] env[62510]: _type = "Task" [ 2076.441350] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2076.448658] env[62510]: DEBUG oslo_vmware.api [None req-c78debfd-6139-4c64-98e7-421d41949ade tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769836, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2076.749013] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c3dfe64-5b98-4780-ac7b-34b2a7683608 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.757275] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3da0775-4579-48a0-ab4d-9fe38d9cc2a6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.787443] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a8fb210-7a60-4d14-9edc-57267f4abd79 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.795205] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cab1f28-9c71-4cb0-b756-f37186d2fc3d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.809089] env[62510]: DEBUG nova.compute.provider_tree [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2076.952938] env[62510]: DEBUG oslo_vmware.api [None req-c78debfd-6139-4c64-98e7-421d41949ade tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769836, 'name': ReconfigVM_Task, 'duration_secs': 0.130901} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2076.953256] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-c78debfd-6139-4c64-98e7-421d41949ade tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367516', 'volume_id': '06655831-5c13-4da1-904f-7991fa27b95c', 'name': 'volume-06655831-5c13-4da1-904f-7991fa27b95c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '94dd7eee-f799-4fb5-854b-e7d59621b125', 'attached_at': '', 'detached_at': '', 'volume_id': '06655831-5c13-4da1-904f-7991fa27b95c', 'serial': '06655831-5c13-4da1-904f-7991fa27b95c'} {{(pid=62510) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2077.312007] env[62510]: DEBUG nova.scheduler.client.report [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2077.817198] env[62510]: DEBUG oslo_concurrency.lockutils [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.143s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2077.817656] env[62510]: DEBUG nova.compute.manager [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2078.002203] env[62510]: DEBUG nova.objects.instance [None req-c78debfd-6139-4c64-98e7-421d41949ade tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lazy-loading 'flavor' on Instance uuid 94dd7eee-f799-4fb5-854b-e7d59621b125 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2078.323420] env[62510]: DEBUG nova.compute.utils [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2078.324965] env[62510]: DEBUG nova.compute.manager [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2078.325055] env[62510]: DEBUG nova.network.neutron [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2078.377700] env[62510]: DEBUG nova.policy [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '93399cd69f4245188fd39bde29ee3d5a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '11c021c6b45c452f83732fe578e576f6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 2078.506554] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c78debfd-6139-4c64-98e7-421d41949ade tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lock "94dd7eee-f799-4fb5-854b-e7d59621b125" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.260s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2078.649604] env[62510]: DEBUG nova.network.neutron [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Successfully created port: 14966b8d-39b1-4552-9912-c1897a3946b2 {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2078.828096] env[62510]: DEBUG nova.compute.manager [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Start building block device mappings for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2079.368198] env[62510]: DEBUG oslo_concurrency.lockutils [None req-75ff0315-70bb-45b2-83fa-90591a0b8f7c tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Acquiring lock "94dd7eee-f799-4fb5-854b-e7d59621b125" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2079.368477] env[62510]: DEBUG oslo_concurrency.lockutils [None req-75ff0315-70bb-45b2-83fa-90591a0b8f7c tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lock "94dd7eee-f799-4fb5-854b-e7d59621b125" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2079.839641] env[62510]: DEBUG nova.compute.manager [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2079.865467] env[62510]: DEBUG nova.virt.hardware [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2079.865711] env[62510]: DEBUG nova.virt.hardware [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2079.865869] env[62510]: DEBUG nova.virt.hardware [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2079.866062] env[62510]: DEBUG nova.virt.hardware [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2079.866211] env[62510]: DEBUG nova.virt.hardware [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 
tempest-AttachVolumeNegativeTest-146397362-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2079.866355] env[62510]: DEBUG nova.virt.hardware [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2079.866561] env[62510]: DEBUG nova.virt.hardware [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2079.866719] env[62510]: DEBUG nova.virt.hardware [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2079.866881] env[62510]: DEBUG nova.virt.hardware [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2079.867056] env[62510]: DEBUG nova.virt.hardware [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2079.867239] env[62510]: DEBUG nova.virt.hardware [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2079.868112] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d9b9aa6-2cbd-4de2-b73a-fdad55684ab0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.871337] env[62510]: DEBUG nova.compute.utils [None req-75ff0315-70bb-45b2-83fa-90591a0b8f7c tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2079.878328] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b5edafe-b76a-48e6-9644-9e92b0994e47 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.024651] env[62510]: DEBUG nova.compute.manager [req-a5148094-92bb-4a91-a542-e3fb815cd273 req-dbd23816-e2e4-4b5b-9a75-01489c342925 service nova] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Received event network-vif-plugged-14966b8d-39b1-4552-9912-c1897a3946b2 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2080.024878] env[62510]: 
DEBUG oslo_concurrency.lockutils [req-a5148094-92bb-4a91-a542-e3fb815cd273 req-dbd23816-e2e4-4b5b-9a75-01489c342925 service nova] Acquiring lock "ef20eba1-cb3a-4e0f-bbdb-54949e409546-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2080.025450] env[62510]: DEBUG oslo_concurrency.lockutils [req-a5148094-92bb-4a91-a542-e3fb815cd273 req-dbd23816-e2e4-4b5b-9a75-01489c342925 service nova] Lock "ef20eba1-cb3a-4e0f-bbdb-54949e409546-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2080.025705] env[62510]: DEBUG oslo_concurrency.lockutils [req-a5148094-92bb-4a91-a542-e3fb815cd273 req-dbd23816-e2e4-4b5b-9a75-01489c342925 service nova] Lock "ef20eba1-cb3a-4e0f-bbdb-54949e409546-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2080.025780] env[62510]: DEBUG nova.compute.manager [req-a5148094-92bb-4a91-a542-e3fb815cd273 req-dbd23816-e2e4-4b5b-9a75-01489c342925 service nova] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] No waiting events found dispatching network-vif-plugged-14966b8d-39b1-4552-9912-c1897a3946b2 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2080.025962] env[62510]: WARNING nova.compute.manager [req-a5148094-92bb-4a91-a542-e3fb815cd273 req-dbd23816-e2e4-4b5b-9a75-01489c342925 service nova] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Received unexpected event network-vif-plugged-14966b8d-39b1-4552-9912-c1897a3946b2 for instance with vm_state building and task_state spawning. [ 2080.373735] env[62510]: DEBUG oslo_concurrency.lockutils [None req-75ff0315-70bb-45b2-83fa-90591a0b8f7c tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lock "94dd7eee-f799-4fb5-854b-e7d59621b125" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.005s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2080.589183] env[62510]: DEBUG nova.network.neutron [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Successfully updated port: 14966b8d-39b1-4552-9912-c1897a3946b2 {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2080.614034] env[62510]: DEBUG nova.compute.manager [req-bbc1d44c-de3a-4d31-ae76-f3c62426b1bb req-eec043fd-6ebd-484d-988d-49bd770114ed service nova] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Received event network-changed-14966b8d-39b1-4552-9912-c1897a3946b2 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2080.614034] env[62510]: DEBUG nova.compute.manager [req-bbc1d44c-de3a-4d31-ae76-f3c62426b1bb req-eec043fd-6ebd-484d-988d-49bd770114ed service nova] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Refreshing instance network info cache due to event network-changed-14966b8d-39b1-4552-9912-c1897a3946b2. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 2080.614364] env[62510]: DEBUG oslo_concurrency.lockutils [req-bbc1d44c-de3a-4d31-ae76-f3c62426b1bb req-eec043fd-6ebd-484d-988d-49bd770114ed service nova] Acquiring lock "refresh_cache-ef20eba1-cb3a-4e0f-bbdb-54949e409546" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2080.614364] env[62510]: DEBUG oslo_concurrency.lockutils [req-bbc1d44c-de3a-4d31-ae76-f3c62426b1bb req-eec043fd-6ebd-484d-988d-49bd770114ed service nova] Acquired lock "refresh_cache-ef20eba1-cb3a-4e0f-bbdb-54949e409546" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2080.614495] env[62510]: DEBUG nova.network.neutron [req-bbc1d44c-de3a-4d31-ae76-f3c62426b1bb req-eec043fd-6ebd-484d-988d-49bd770114ed service nova] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Refreshing network info cache for port 14966b8d-39b1-4552-9912-c1897a3946b2 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2081.091379] env[62510]: DEBUG oslo_concurrency.lockutils [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquiring lock "refresh_cache-ef20eba1-cb3a-4e0f-bbdb-54949e409546" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2081.145464] env[62510]: DEBUG nova.network.neutron [req-bbc1d44c-de3a-4d31-ae76-f3c62426b1bb req-eec043fd-6ebd-484d-988d-49bd770114ed service nova] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2081.216246] env[62510]: DEBUG nova.network.neutron [req-bbc1d44c-de3a-4d31-ae76-f3c62426b1bb req-eec043fd-6ebd-484d-988d-49bd770114ed service nova] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2081.437624] env[62510]: DEBUG oslo_concurrency.lockutils [None req-75ff0315-70bb-45b2-83fa-90591a0b8f7c tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Acquiring lock "94dd7eee-f799-4fb5-854b-e7d59621b125" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2081.437946] env[62510]: DEBUG oslo_concurrency.lockutils [None req-75ff0315-70bb-45b2-83fa-90591a0b8f7c tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lock "94dd7eee-f799-4fb5-854b-e7d59621b125" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2081.438135] env[62510]: INFO nova.compute.manager [None req-75ff0315-70bb-45b2-83fa-90591a0b8f7c tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Attaching volume e2bf7b9e-c51d-4087-b014-e28cafbc1fa8 to /dev/sdc [ 2081.468090] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-347306d1-a073-4724-94a3-d03b5b67d657 {{(pid=62510) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.475258] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf95b018-e715-40ed-a3e1-e804b4150cb2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.488417] env[62510]: DEBUG nova.virt.block_device [None req-75ff0315-70bb-45b2-83fa-90591a0b8f7c tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Updating existing volume attachment record: fc639c40-79f0-4192-ac16-d2bfcb81804b {{(pid=62510) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2081.719365] env[62510]: DEBUG oslo_concurrency.lockutils [req-bbc1d44c-de3a-4d31-ae76-f3c62426b1bb req-eec043fd-6ebd-484d-988d-49bd770114ed service nova] Releasing lock "refresh_cache-ef20eba1-cb3a-4e0f-bbdb-54949e409546" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2081.719789] env[62510]: DEBUG oslo_concurrency.lockutils [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquired lock "refresh_cache-ef20eba1-cb3a-4e0f-bbdb-54949e409546" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2081.719976] env[62510]: DEBUG nova.network.neutron [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2082.250253] env[62510]: DEBUG nova.network.neutron [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Instance cache missing network info. 
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2082.375050] env[62510]: DEBUG nova.network.neutron [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Updating instance_info_cache with network_info: [{"id": "14966b8d-39b1-4552-9912-c1897a3946b2", "address": "fa:16:3e:aa:de:7f", "network": {"id": "e420cc26-6a46-4189-b24c-78c39b6b4d50", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-234097015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11c021c6b45c452f83732fe578e576f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6eb7e3e9-5cc2-40f1-a6eb-f70f06531667", "external-id": "nsx-vlan-transportzone-938", "segmentation_id": 938, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14966b8d-39", "ovs_interfaceid": "14966b8d-39b1-4552-9912-c1897a3946b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2082.878190] env[62510]: DEBUG oslo_concurrency.lockutils [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Releasing lock "refresh_cache-ef20eba1-cb3a-4e0f-bbdb-54949e409546" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2082.878531] env[62510]: DEBUG nova.compute.manager [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Instance network_info: |[{"id": "14966b8d-39b1-4552-9912-c1897a3946b2", "address": "fa:16:3e:aa:de:7f", "network": {"id": "e420cc26-6a46-4189-b24c-78c39b6b4d50", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-234097015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11c021c6b45c452f83732fe578e576f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6eb7e3e9-5cc2-40f1-a6eb-f70f06531667", "external-id": "nsx-vlan-transportzone-938", "segmentation_id": 938, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14966b8d-39", "ovs_interfaceid": "14966b8d-39b1-4552-9912-c1897a3946b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 2082.878985] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:aa:de:7f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6eb7e3e9-5cc2-40f1-a6eb-f70f06531667', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '14966b8d-39b1-4552-9912-c1897a3946b2', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2082.886610] env[62510]: DEBUG oslo.service.loopingcall [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2082.886787] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2082.887032] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a03414dc-af00-4173-aef6-b868369a7cb8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.906407] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2082.906407] env[62510]: value = "task-1769838" [ 2082.906407] env[62510]: _type = "Task" [ 2082.906407] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2082.913686] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769838, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2083.417932] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769838, 'name': CreateVM_Task, 'duration_secs': 0.313159} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2083.418113] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2083.418753] env[62510]: DEBUG oslo_concurrency.lockutils [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2083.418917] env[62510]: DEBUG oslo_concurrency.lockutils [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2083.419254] env[62510]: DEBUG oslo_concurrency.lockutils [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2083.419494] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08c44b59-6d66-4eaf-af9f-fe1ca7a76a3d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.423760] env[62510]: DEBUG oslo_vmware.api [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 2083.423760] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52b2abe6-7293-26f6-2d30-b541311aa208" [ 2083.423760] env[62510]: _type = "Task" [ 2083.423760] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2083.431076] env[62510]: DEBUG oslo_vmware.api [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52b2abe6-7293-26f6-2d30-b541311aa208, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2083.934346] env[62510]: DEBUG oslo_vmware.api [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52b2abe6-7293-26f6-2d30-b541311aa208, 'name': SearchDatastore_Task, 'duration_secs': 0.011504} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2083.934621] env[62510]: DEBUG oslo_concurrency.lockutils [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2083.934721] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2083.934952] env[62510]: DEBUG oslo_concurrency.lockutils [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2083.935112] env[62510]: DEBUG oslo_concurrency.lockutils [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2083.935305] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2083.935591] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bbea8ec0-642c-4975-8e5e-95b819162f87 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.943951] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2083.944820] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2083.944910] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4426cef-b9cd-4dea-9432-cb79906e1f22 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.950034] env[62510]: DEBUG oslo_vmware.api [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 2083.950034] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]521466c8-0241-4f3a-bba3-69ce18b0bbcd" [ 2083.950034] env[62510]: _type = "Task" [ 2083.950034] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2083.957463] env[62510]: DEBUG oslo_vmware.api [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]521466c8-0241-4f3a-bba3-69ce18b0bbcd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2084.460807] env[62510]: DEBUG oslo_vmware.api [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]521466c8-0241-4f3a-bba3-69ce18b0bbcd, 'name': SearchDatastore_Task, 'duration_secs': 0.008078} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2084.461624] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3097c9be-cdc7-4667-ab2d-42265e6fd698 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.466728] env[62510]: DEBUG oslo_vmware.api [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 2084.466728] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]523b2a12-b211-a815-82fb-69173f2a1a4a" [ 2084.466728] env[62510]: _type = "Task" [ 2084.466728] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2084.474130] env[62510]: DEBUG oslo_vmware.api [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]523b2a12-b211-a815-82fb-69173f2a1a4a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2084.976949] env[62510]: DEBUG oslo_vmware.api [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]523b2a12-b211-a815-82fb-69173f2a1a4a, 'name': SearchDatastore_Task, 'duration_secs': 0.009783} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2084.977363] env[62510]: DEBUG oslo_concurrency.lockutils [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2084.977474] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] ef20eba1-cb3a-4e0f-bbdb-54949e409546/ef20eba1-cb3a-4e0f-bbdb-54949e409546.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2084.977825] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2084.979070] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-31a8a1f5-728b-4a07-bbee-ea8e98a5fa73 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.986284] env[62510]: DEBUG oslo_vmware.api [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 2084.986284] env[62510]: value = "task-1769840" [ 2084.986284] env[62510]: _type = "Task" [ 2084.986284] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2084.993591] env[62510]: DEBUG oslo_vmware.api [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769840, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2085.485324] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2085.485558] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Starting heal instance info cache {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 2085.485634] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Rebuilding the list of instances to heal {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10313}} [ 2085.496765] env[62510]: DEBUG oslo_vmware.api [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769840, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2085.992954] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Skipping network cache update for instance because it is Building. {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10322}} [ 2086.000196] env[62510]: DEBUG oslo_vmware.api [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769840, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.71022} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2086.000429] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] ef20eba1-cb3a-4e0f-bbdb-54949e409546/ef20eba1-cb3a-4e0f-bbdb-54949e409546.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2086.000653] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2086.000897] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d004f7fc-0408-44cc-8ebe-e994128deae2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.008793] env[62510]: DEBUG oslo_vmware.api [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 2086.008793] env[62510]: value = "task-1769841" [ 2086.008793] env[62510]: _type = "Task" [ 2086.008793] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2086.016128] env[62510]: DEBUG oslo_vmware.api [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769841, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2086.019713] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "refresh_cache-94dd7eee-f799-4fb5-854b-e7d59621b125" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2086.019856] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquired lock "refresh_cache-94dd7eee-f799-4fb5-854b-e7d59621b125" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2086.019997] env[62510]: DEBUG nova.network.neutron [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Forcefully refreshing network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2086.020279] env[62510]: DEBUG nova.objects.instance [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lazy-loading 'info_cache' on Instance uuid 94dd7eee-f799-4fb5-854b-e7d59621b125 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2086.030623] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-75ff0315-70bb-45b2-83fa-90591a0b8f7c tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Volume attach. Driver type: vmdk {{(pid=62510) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2086.030843] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-75ff0315-70bb-45b2-83fa-90591a0b8f7c tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367517', 'volume_id': 'e2bf7b9e-c51d-4087-b014-e28cafbc1fa8', 'name': 'volume-e2bf7b9e-c51d-4087-b014-e28cafbc1fa8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '94dd7eee-f799-4fb5-854b-e7d59621b125', 'attached_at': '', 'detached_at': '', 'volume_id': 'e2bf7b9e-c51d-4087-b014-e28cafbc1fa8', 'serial': 'e2bf7b9e-c51d-4087-b014-e28cafbc1fa8'} {{(pid=62510) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2086.031671] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5712b86f-0243-4e99-8695-57292faae41a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.048080] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea182b14-b3ea-445b-8c55-60b09367221f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.076399] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-75ff0315-70bb-45b2-83fa-90591a0b8f7c tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] volume-e2bf7b9e-c51d-4087-b014-e28cafbc1fa8/volume-e2bf7b9e-c51d-4087-b014-e28cafbc1fa8.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 
2086.076637] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4db408bf-2338-40c1-b36a-e941e8c83062 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.094368] env[62510]: DEBUG oslo_vmware.api [None req-75ff0315-70bb-45b2-83fa-90591a0b8f7c tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Waiting for the task: (returnval){ [ 2086.094368] env[62510]: value = "task-1769842" [ 2086.094368] env[62510]: _type = "Task" [ 2086.094368] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2086.101924] env[62510]: DEBUG oslo_vmware.api [None req-75ff0315-70bb-45b2-83fa-90591a0b8f7c tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769842, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2086.518457] env[62510]: DEBUG oslo_vmware.api [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769841, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068399} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2086.518683] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2086.519441] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5cf066f-788e-4de3-b4e6-73246ee0aef9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.541135] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Reconfiguring VM instance instance-0000007d to attach disk [datastore1] ef20eba1-cb3a-4e0f-bbdb-54949e409546/ef20eba1-cb3a-4e0f-bbdb-54949e409546.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2086.541841] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1177939d-0c3c-4da5-8c7a-9cb401a5912f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.561751] env[62510]: DEBUG oslo_vmware.api [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 2086.561751] env[62510]: value = "task-1769843" [ 2086.561751] env[62510]: _type = "Task" [ 2086.561751] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2086.571640] env[62510]: DEBUG oslo_vmware.api [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769843, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2086.603584] env[62510]: DEBUG oslo_vmware.api [None req-75ff0315-70bb-45b2-83fa-90591a0b8f7c tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769842, 'name': ReconfigVM_Task, 'duration_secs': 0.345338} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2086.603876] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-75ff0315-70bb-45b2-83fa-90591a0b8f7c tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Reconfigured VM instance instance-0000007a to attach disk [datastore1] volume-e2bf7b9e-c51d-4087-b014-e28cafbc1fa8/volume-e2bf7b9e-c51d-4087-b014-e28cafbc1fa8.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2086.608701] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-381657d7-47d3-4169-9b8e-2b2a198a9400 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.623410] env[62510]: DEBUG oslo_vmware.api [None req-75ff0315-70bb-45b2-83fa-90591a0b8f7c tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Waiting for the task: (returnval){ [ 2086.623410] env[62510]: value = "task-1769844" [ 2086.623410] env[62510]: _type = "Task" [ 2086.623410] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2086.631537] env[62510]: DEBUG oslo_vmware.api [None req-75ff0315-70bb-45b2-83fa-90591a0b8f7c tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769844, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2087.071783] env[62510]: DEBUG oslo_vmware.api [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769843, 'name': ReconfigVM_Task, 'duration_secs': 0.289717} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2087.072143] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Reconfigured VM instance instance-0000007d to attach disk [datastore1] ef20eba1-cb3a-4e0f-bbdb-54949e409546/ef20eba1-cb3a-4e0f-bbdb-54949e409546.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2087.072648] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0490df60-48cf-4a40-be93-7096cbf2b52c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.078546] env[62510]: DEBUG oslo_vmware.api [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 2087.078546] env[62510]: value = "task-1769845" [ 2087.078546] env[62510]: _type = "Task" [ 2087.078546] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2087.086219] env[62510]: DEBUG oslo_vmware.api [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769845, 'name': Rename_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2087.132964] env[62510]: DEBUG oslo_vmware.api [None req-75ff0315-70bb-45b2-83fa-90591a0b8f7c tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769844, 'name': ReconfigVM_Task, 'duration_secs': 0.147226} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2087.133325] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-75ff0315-70bb-45b2-83fa-90591a0b8f7c tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367517', 'volume_id': 'e2bf7b9e-c51d-4087-b014-e28cafbc1fa8', 'name': 'volume-e2bf7b9e-c51d-4087-b014-e28cafbc1fa8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '94dd7eee-f799-4fb5-854b-e7d59621b125', 'attached_at': '', 'detached_at': '', 'volume_id': 'e2bf7b9e-c51d-4087-b014-e28cafbc1fa8', 'serial': 'e2bf7b9e-c51d-4087-b014-e28cafbc1fa8'} {{(pid=62510) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2088.268317] env[62510]: DEBUG oslo_vmware.api [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769845, 'name': Rename_Task, 'duration_secs': 0.145674} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2088.269039] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2088.269289] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a99abdb0-a623-4ed3-82e8-728c2df59896 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.275258] env[62510]: DEBUG oslo_vmware.api [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 2088.275258] env[62510]: value = "task-1769846" [ 2088.275258] env[62510]: _type = "Task" [ 2088.275258] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2088.282933] env[62510]: DEBUG oslo_vmware.api [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769846, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2088.407905] env[62510]: DEBUG nova.network.neutron [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Updating instance_info_cache with network_info: [{"id": "12cbde53-7f97-41bf-818c-04b6c994d690", "address": "fa:16:3e:75:92:c1", "network": {"id": "2193bc16-0e54-4910-9194-2724652b0e5d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1870939634-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "59300e0f20144d9f88b78f7c971e86c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbdab640-5fea-4254-8bd3-f855b7eaca0d", "external-id": "nsx-vlan-transportzone-615", "segmentation_id": 615, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12cbde53-7f", "ovs_interfaceid": "12cbde53-7f97-41bf-818c-04b6c994d690", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2088.764806] env[62510]: DEBUG nova.compute.manager [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Stashing vm_state: active {{(pid=62510) _prep_resize /opt/stack/nova/nova/compute/manager.py:5998}} [ 2088.784992] env[62510]: DEBUG oslo_vmware.api [None 
req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769846, 'name': PowerOnVM_Task, 'duration_secs': 0.442504} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2088.785264] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2088.785465] env[62510]: INFO nova.compute.manager [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Took 8.95 seconds to spawn the instance on the hypervisor. [ 2088.785640] env[62510]: DEBUG nova.compute.manager [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2088.786381] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23f42d9f-1675-4b5c-9c7d-8850a66dd57d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.789221] env[62510]: DEBUG nova.objects.instance [None req-75ff0315-70bb-45b2-83fa-90591a0b8f7c tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lazy-loading 'flavor' on Instance uuid 94dd7eee-f799-4fb5-854b-e7d59621b125 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2088.910951] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Releasing lock "refresh_cache-94dd7eee-f799-4fb5-854b-e7d59621b125" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2088.911148] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Updated the network info_cache for instance {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10380}} [ 2089.285967] env[62510]: DEBUG oslo_concurrency.lockutils [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2089.286311] env[62510]: DEBUG oslo_concurrency.lockutils [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2089.293463] env[62510]: DEBUG oslo_concurrency.lockutils [None req-75ff0315-70bb-45b2-83fa-90591a0b8f7c tempest-AttachVolumeTestJSON-303590859 
tempest-AttachVolumeTestJSON-303590859-project-member] Lock "94dd7eee-f799-4fb5-854b-e7d59621b125" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.856s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2089.305903] env[62510]: INFO nova.compute.manager [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Took 15.61 seconds to build instance. [ 2089.792029] env[62510]: INFO nova.compute.claims [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2089.808071] env[62510]: DEBUG oslo_concurrency.lockutils [None req-486d863b-39bb-4c46-aa77-d6f73502f646 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "ef20eba1-cb3a-4e0f-bbdb-54949e409546" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.122s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2090.000796] env[62510]: DEBUG oslo_concurrency.lockutils [None req-803a4bba-74b0-47d0-8d61-4f42d12ecb44 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Acquiring lock "94dd7eee-f799-4fb5-854b-e7d59621b125" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2090.000796] env[62510]: DEBUG oslo_concurrency.lockutils [None req-803a4bba-74b0-47d0-8d61-4f42d12ecb44 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lock "94dd7eee-f799-4fb5-854b-e7d59621b125" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2090.049428] env[62510]: DEBUG nova.compute.manager [req-772cbd66-2d0f-4c42-b521-fb55eb2ea135 req-9b1b391c-7ed1-444b-8dfb-3d51d5daabbc service nova] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Received event network-changed-14966b8d-39b1-4552-9912-c1897a3946b2 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2090.049428] env[62510]: DEBUG nova.compute.manager [req-772cbd66-2d0f-4c42-b521-fb55eb2ea135 req-9b1b391c-7ed1-444b-8dfb-3d51d5daabbc service nova] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Refreshing instance network info cache due to event network-changed-14966b8d-39b1-4552-9912-c1897a3946b2. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 2090.049760] env[62510]: DEBUG oslo_concurrency.lockutils [req-772cbd66-2d0f-4c42-b521-fb55eb2ea135 req-9b1b391c-7ed1-444b-8dfb-3d51d5daabbc service nova] Acquiring lock "refresh_cache-ef20eba1-cb3a-4e0f-bbdb-54949e409546" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2090.049760] env[62510]: DEBUG oslo_concurrency.lockutils [req-772cbd66-2d0f-4c42-b521-fb55eb2ea135 req-9b1b391c-7ed1-444b-8dfb-3d51d5daabbc service nova] Acquired lock "refresh_cache-ef20eba1-cb3a-4e0f-bbdb-54949e409546" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2090.049936] env[62510]: DEBUG nova.network.neutron [req-772cbd66-2d0f-4c42-b521-fb55eb2ea135 req-9b1b391c-7ed1-444b-8dfb-3d51d5daabbc service nova] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Refreshing network info cache for port 14966b8d-39b1-4552-9912-c1897a3946b2 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2090.298425] env[62510]: INFO nova.compute.resource_tracker [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Updating resource usage from migration 5466af3c-25cd-473d-96df-5f910ce643d7 [ 2090.370541] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3187e7d2-1898-481f-b18c-992f9c55b90a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.378145] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a45fb53-e1a9-4af5-8d48-b88950126c67 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.406648] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cddaaa9-1b5a-4b1a-a312-015a116a983e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.413185] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc1cf6f7-4790-415c-9e5a-c842750e2747 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.425420] env[62510]: DEBUG nova.compute.provider_tree [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2090.502744] env[62510]: INFO nova.compute.manager [None req-803a4bba-74b0-47d0-8d61-4f42d12ecb44 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Detaching volume 06655831-5c13-4da1-904f-7991fa27b95c [ 2090.537811] env[62510]: INFO nova.virt.block_device [None req-803a4bba-74b0-47d0-8d61-4f42d12ecb44 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Attempting to driver detach volume 06655831-5c13-4da1-904f-7991fa27b95c from mountpoint /dev/sdb [ 
2090.538064] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-803a4bba-74b0-47d0-8d61-4f42d12ecb44 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Volume detach. Driver type: vmdk {{(pid=62510) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2090.538258] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-803a4bba-74b0-47d0-8d61-4f42d12ecb44 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367516', 'volume_id': '06655831-5c13-4da1-904f-7991fa27b95c', 'name': 'volume-06655831-5c13-4da1-904f-7991fa27b95c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '94dd7eee-f799-4fb5-854b-e7d59621b125', 'attached_at': '', 'detached_at': '', 'volume_id': '06655831-5c13-4da1-904f-7991fa27b95c', 'serial': '06655831-5c13-4da1-904f-7991fa27b95c'} {{(pid=62510) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2090.539122] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5753f44d-ebd8-4e7c-a7a6-0746e65ef005 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.566787] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5236e8d-2820-41b4-a32e-0315ea7fc8a0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.574587] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fa2ae17-c8cc-46a5-bb6b-06c48803e0ea {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.599997] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0e77a05-c272-4e54-be28-5c83e5f7be96 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.615461] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-803a4bba-74b0-47d0-8d61-4f42d12ecb44 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] The volume has not been displaced from its original location: [datastore1] volume-06655831-5c13-4da1-904f-7991fa27b95c/volume-06655831-5c13-4da1-904f-7991fa27b95c.vmdk. No consolidation needed. 
{{(pid=62510) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2090.621181] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-803a4bba-74b0-47d0-8d61-4f42d12ecb44 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Reconfiguring VM instance instance-0000007a to detach disk 2001 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2090.621542] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-02c880d2-bbda-4ffe-9fa0-f1338afe9287 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.642099] env[62510]: DEBUG oslo_vmware.api [None req-803a4bba-74b0-47d0-8d61-4f42d12ecb44 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Waiting for the task: (returnval){ [ 2090.642099] env[62510]: value = "task-1769847" [ 2090.642099] env[62510]: _type = "Task" [ 2090.642099] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2090.649717] env[62510]: DEBUG oslo_vmware.api [None req-803a4bba-74b0-47d0-8d61-4f42d12ecb44 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769847, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2090.787292] env[62510]: DEBUG nova.network.neutron [req-772cbd66-2d0f-4c42-b521-fb55eb2ea135 req-9b1b391c-7ed1-444b-8dfb-3d51d5daabbc service nova] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Updated VIF entry in instance network info cache for port 14966b8d-39b1-4552-9912-c1897a3946b2. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2090.787696] env[62510]: DEBUG nova.network.neutron [req-772cbd66-2d0f-4c42-b521-fb55eb2ea135 req-9b1b391c-7ed1-444b-8dfb-3d51d5daabbc service nova] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Updating instance_info_cache with network_info: [{"id": "14966b8d-39b1-4552-9912-c1897a3946b2", "address": "fa:16:3e:aa:de:7f", "network": {"id": "e420cc26-6a46-4189-b24c-78c39b6b4d50", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-234097015-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "11c021c6b45c452f83732fe578e576f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6eb7e3e9-5cc2-40f1-a6eb-f70f06531667", "external-id": "nsx-vlan-transportzone-938", "segmentation_id": 938, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14966b8d-39", "ovs_interfaceid": "14966b8d-39b1-4552-9912-c1897a3946b2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2090.928242] env[62510]: DEBUG nova.scheduler.client.report [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2091.152530] env[62510]: DEBUG oslo_vmware.api [None req-803a4bba-74b0-47d0-8d61-4f42d12ecb44 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769847, 'name': ReconfigVM_Task, 'duration_secs': 0.248009} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2091.152815] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-803a4bba-74b0-47d0-8d61-4f42d12ecb44 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Reconfigured VM instance instance-0000007a to detach disk 2001 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2091.157683] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b803c6b5-fa47-4013-b47f-54f75fcfbcea {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.173462] env[62510]: DEBUG oslo_vmware.api [None req-803a4bba-74b0-47d0-8d61-4f42d12ecb44 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Waiting for the task: (returnval){ [ 2091.173462] env[62510]: value = "task-1769848" [ 2091.173462] env[62510]: _type = "Task" [ 2091.173462] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2091.181748] env[62510]: DEBUG oslo_vmware.api [None req-803a4bba-74b0-47d0-8d61-4f42d12ecb44 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769848, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2091.290314] env[62510]: DEBUG oslo_concurrency.lockutils [req-772cbd66-2d0f-4c42-b521-fb55eb2ea135 req-9b1b391c-7ed1-444b-8dfb-3d51d5daabbc service nova] Releasing lock "refresh_cache-ef20eba1-cb3a-4e0f-bbdb-54949e409546" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2091.434036] env[62510]: DEBUG oslo_concurrency.lockutils [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.147s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2091.434430] env[62510]: INFO nova.compute.manager [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Migrating [ 2091.683601] env[62510]: DEBUG oslo_vmware.api [None req-803a4bba-74b0-47d0-8d61-4f42d12ecb44 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769848, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2091.952607] env[62510]: DEBUG oslo_concurrency.lockutils [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "refresh_cache-62417973-075e-4128-8eb5-4c62946856e7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2091.952796] env[62510]: DEBUG oslo_concurrency.lockutils [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquired lock "refresh_cache-62417973-075e-4128-8eb5-4c62946856e7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2091.952978] env[62510]: DEBUG nova.network.neutron [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2092.184625] env[62510]: DEBUG oslo_vmware.api [None req-803a4bba-74b0-47d0-8d61-4f42d12ecb44 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769848, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2092.652665] env[62510]: DEBUG nova.network.neutron [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Updating instance_info_cache with network_info: [{"id": "7afce004-8ac8-4715-a27d-7e5162c006ba", "address": "fa:16:3e:b5:da:54", "network": {"id": "e49618de-aacc-4b42-8a2e-7e2dc945a3b1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-883053645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b13a257970e4a9a9f9cfecaaf37d9da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7afce004-8a", "ovs_interfaceid": "7afce004-8ac8-4715-a27d-7e5162c006ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2092.684899] env[62510]: DEBUG oslo_vmware.api [None req-803a4bba-74b0-47d0-8d61-4f42d12ecb44 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769848, 'name': ReconfigVM_Task, 'duration_secs': 1.140795} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2092.685198] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-803a4bba-74b0-47d0-8d61-4f42d12ecb44 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367516', 'volume_id': '06655831-5c13-4da1-904f-7991fa27b95c', 'name': 'volume-06655831-5c13-4da1-904f-7991fa27b95c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '94dd7eee-f799-4fb5-854b-e7d59621b125', 'attached_at': '', 'detached_at': '', 'volume_id': '06655831-5c13-4da1-904f-7991fa27b95c', 'serial': '06655831-5c13-4da1-904f-7991fa27b95c'} {{(pid=62510) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2093.156025] env[62510]: DEBUG oslo_concurrency.lockutils [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Releasing lock "refresh_cache-62417973-075e-4128-8eb5-4c62946856e7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2093.224332] env[62510]: DEBUG nova.objects.instance [None req-803a4bba-74b0-47d0-8d61-4f42d12ecb44 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lazy-loading 'flavor' on Instance uuid 94dd7eee-f799-4fb5-854b-e7d59621b125 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2094.230741] env[62510]: DEBUG oslo_concurrency.lockutils [None req-803a4bba-74b0-47d0-8d61-4f42d12ecb44 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lock "94dd7eee-f799-4fb5-854b-e7d59621b125" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.231s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2094.259680] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0f05057f-afcb-422d-bfb0-47da8b31dcda tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Acquiring lock "94dd7eee-f799-4fb5-854b-e7d59621b125" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2094.259900] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0f05057f-afcb-422d-bfb0-47da8b31dcda tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lock "94dd7eee-f799-4fb5-854b-e7d59621b125" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2094.670137] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81696352-3c63-43f7-8a0d-d91fa447e27a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.688804] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Updating instance 
'62417973-075e-4128-8eb5-4c62946856e7' progress to 0 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2094.763028] env[62510]: INFO nova.compute.manager [None req-0f05057f-afcb-422d-bfb0-47da8b31dcda tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Detaching volume e2bf7b9e-c51d-4087-b014-e28cafbc1fa8 [ 2094.795919] env[62510]: INFO nova.virt.block_device [None req-0f05057f-afcb-422d-bfb0-47da8b31dcda tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Attempting to driver detach volume e2bf7b9e-c51d-4087-b014-e28cafbc1fa8 from mountpoint /dev/sdc [ 2094.796192] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f05057f-afcb-422d-bfb0-47da8b31dcda tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Volume detach. Driver type: vmdk {{(pid=62510) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2094.796419] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f05057f-afcb-422d-bfb0-47da8b31dcda tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367517', 'volume_id': 'e2bf7b9e-c51d-4087-b014-e28cafbc1fa8', 'name': 'volume-e2bf7b9e-c51d-4087-b014-e28cafbc1fa8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '94dd7eee-f799-4fb5-854b-e7d59621b125', 'attached_at': '', 'detached_at': '', 'volume_id': 'e2bf7b9e-c51d-4087-b014-e28cafbc1fa8', 'serial': 'e2bf7b9e-c51d-4087-b014-e28cafbc1fa8'} {{(pid=62510) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2094.797322] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49d0f059-8113-4dd0-a6c6-b4edbceb1ec7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.820291] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55579d9f-17d5-4fc3-81f7-bb72632ccbbb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.827801] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9e883cc-dad2-4369-9aa1-66e016de1b00 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.848157] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94ab5beb-1886-4825-808c-fc82997a2f1e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.862891] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f05057f-afcb-422d-bfb0-47da8b31dcda tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] The volume has not been displaced from its original location: [datastore1] volume-e2bf7b9e-c51d-4087-b014-e28cafbc1fa8/volume-e2bf7b9e-c51d-4087-b014-e28cafbc1fa8.vmdk. No consolidation needed. 
{{(pid=62510) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2094.868156] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f05057f-afcb-422d-bfb0-47da8b31dcda tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Reconfiguring VM instance instance-0000007a to detach disk 2002 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2094.868470] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a9098ab6-555b-479b-afae-943314823e40 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.887090] env[62510]: DEBUG oslo_vmware.api [None req-0f05057f-afcb-422d-bfb0-47da8b31dcda tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Waiting for the task: (returnval){ [ 2094.887090] env[62510]: value = "task-1769849" [ 2094.887090] env[62510]: _type = "Task" [ 2094.887090] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2094.894944] env[62510]: DEBUG oslo_vmware.api [None req-0f05057f-afcb-422d-bfb0-47da8b31dcda tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769849, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2095.195753] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2095.196085] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1041b34a-7015-45b7-9b6d-b33759a16da1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.203659] env[62510]: DEBUG oslo_vmware.api [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2095.203659] env[62510]: value = "task-1769850" [ 2095.203659] env[62510]: _type = "Task" [ 2095.203659] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2095.213366] env[62510]: DEBUG oslo_vmware.api [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769850, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2095.397891] env[62510]: DEBUG oslo_vmware.api [None req-0f05057f-afcb-422d-bfb0-47da8b31dcda tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769849, 'name': ReconfigVM_Task, 'duration_secs': 0.23854} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2095.398295] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f05057f-afcb-422d-bfb0-47da8b31dcda tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Reconfigured VM instance instance-0000007a to detach disk 2002 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2095.403276] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d472bfd6-be24-4743-afc1-5c95de17aaa9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.419778] env[62510]: DEBUG oslo_vmware.api [None req-0f05057f-afcb-422d-bfb0-47da8b31dcda tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Waiting for the task: (returnval){ [ 2095.419778] env[62510]: value = "task-1769851" [ 2095.419778] env[62510]: _type = "Task" [ 2095.419778] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2095.428401] env[62510]: DEBUG oslo_vmware.api [None req-0f05057f-afcb-422d-bfb0-47da8b31dcda tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769851, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2095.713466] env[62510]: DEBUG oslo_vmware.api [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769850, 'name': PowerOffVM_Task, 'duration_secs': 0.199299} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2095.713703] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2095.713879] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Updating instance '62417973-075e-4128-8eb5-4c62946856e7' progress to 17 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2095.929944] env[62510]: DEBUG oslo_vmware.api [None req-0f05057f-afcb-422d-bfb0-47da8b31dcda tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769851, 'name': ReconfigVM_Task, 'duration_secs': 0.134005} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2095.930254] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f05057f-afcb-422d-bfb0-47da8b31dcda tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367517', 'volume_id': 'e2bf7b9e-c51d-4087-b014-e28cafbc1fa8', 'name': 'volume-e2bf7b9e-c51d-4087-b014-e28cafbc1fa8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '94dd7eee-f799-4fb5-854b-e7d59621b125', 'attached_at': '', 'detached_at': '', 'volume_id': 'e2bf7b9e-c51d-4087-b014-e28cafbc1fa8', 'serial': 'e2bf7b9e-c51d-4087-b014-e28cafbc1fa8'} {{(pid=62510) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2096.220944] env[62510]: DEBUG nova.virt.hardware [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:41Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2096.221221] env[62510]: DEBUG nova.virt.hardware [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2096.221366] env[62510]: DEBUG nova.virt.hardware [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2096.221522] env[62510]: DEBUG nova.virt.hardware [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2096.221665] env[62510]: DEBUG nova.virt.hardware [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2096.221814] env[62510]: DEBUG nova.virt.hardware [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2096.222022] env[62510]: DEBUG nova.virt.hardware [None req-763f794c-f3d4-4650-8bc3-7a048e571637 
tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2096.222240] env[62510]: DEBUG nova.virt.hardware [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2096.222417] env[62510]: DEBUG nova.virt.hardware [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2096.222579] env[62510]: DEBUG nova.virt.hardware [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2096.222752] env[62510]: DEBUG nova.virt.hardware [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2096.227754] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f2ce6fd4-3eef-41f7-aba9-db57f82042cb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.243680] env[62510]: DEBUG oslo_vmware.api [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2096.243680] env[62510]: value = "task-1769852" [ 2096.243680] env[62510]: _type = "Task" [ 2096.243680] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2096.252146] env[62510]: DEBUG oslo_vmware.api [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769852, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2096.472380] env[62510]: DEBUG nova.objects.instance [None req-0f05057f-afcb-422d-bfb0-47da8b31dcda tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lazy-loading 'flavor' on Instance uuid 94dd7eee-f799-4fb5-854b-e7d59621b125 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2096.753603] env[62510]: DEBUG oslo_vmware.api [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769852, 'name': ReconfigVM_Task, 'duration_secs': 0.25976} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2096.753923] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Updating instance '62417973-075e-4128-8eb5-4c62946856e7' progress to 33 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2097.260163] env[62510]: DEBUG nova.virt.hardware [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2097.260427] env[62510]: DEBUG nova.virt.hardware [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2097.260578] env[62510]: DEBUG nova.virt.hardware [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2097.260770] env[62510]: DEBUG nova.virt.hardware [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2097.260955] env[62510]: DEBUG nova.virt.hardware [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2097.261113] env[62510]: DEBUG nova.virt.hardware [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2097.261319] env[62510]: DEBUG nova.virt.hardware [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2097.261483] env[62510]: DEBUG nova.virt.hardware [None req-763f794c-f3d4-4650-8bc3-7a048e571637 
tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2097.261688] env[62510]: DEBUG nova.virt.hardware [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2097.261879] env[62510]: DEBUG nova.virt.hardware [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2097.262070] env[62510]: DEBUG nova.virt.hardware [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2097.267432] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Reconfiguring VM instance instance-0000007b to detach disk 2000 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2097.267716] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-98d04eb0-5930-4de2-a5a5-47398a84ebfd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.287531] env[62510]: DEBUG oslo_vmware.api [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2097.287531] env[62510]: value = "task-1769853" [ 2097.287531] env[62510]: _type = "Task" [ 2097.287531] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2097.295241] env[62510]: DEBUG oslo_vmware.api [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769853, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2097.479914] env[62510]: DEBUG oslo_concurrency.lockutils [None req-0f05057f-afcb-422d-bfb0-47da8b31dcda tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lock "94dd7eee-f799-4fb5-854b-e7d59621b125" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.219s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2097.801303] env[62510]: DEBUG oslo_vmware.api [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769853, 'name': ReconfigVM_Task, 'duration_secs': 0.162663} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2097.801715] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Reconfigured VM instance instance-0000007b to detach disk 2000 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2097.802895] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97a7f820-2032-4298-b19a-29a9572b3d0c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.839553] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] 62417973-075e-4128-8eb5-4c62946856e7/62417973-075e-4128-8eb5-4c62946856e7.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2097.839991] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ba35358d-34bc-4764-b248-284422d97946 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.870250] env[62510]: DEBUG oslo_vmware.api [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2097.870250] env[62510]: value = "task-1769854" [ 2097.870250] env[62510]: _type = "Task" [ 2097.870250] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2097.884281] env[62510]: DEBUG oslo_vmware.api [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769854, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2098.385113] env[62510]: DEBUG oslo_vmware.api [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769854, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2098.654716] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c1b4cbb7-eb95-4b90-959b-4464c3aba173 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Acquiring lock "94dd7eee-f799-4fb5-854b-e7d59621b125" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2098.655260] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c1b4cbb7-eb95-4b90-959b-4464c3aba173 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lock "94dd7eee-f799-4fb5-854b-e7d59621b125" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2098.655544] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c1b4cbb7-eb95-4b90-959b-4464c3aba173 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Acquiring lock "94dd7eee-f799-4fb5-854b-e7d59621b125-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2098.655910] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c1b4cbb7-eb95-4b90-959b-4464c3aba173 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lock "94dd7eee-f799-4fb5-854b-e7d59621b125-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2098.656260] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c1b4cbb7-eb95-4b90-959b-4464c3aba173 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lock "94dd7eee-f799-4fb5-854b-e7d59621b125-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2098.659348] env[62510]: INFO nova.compute.manager [None req-c1b4cbb7-eb95-4b90-959b-4464c3aba173 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Terminating instance [ 2098.882020] env[62510]: DEBUG oslo_vmware.api [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769854, 'name': ReconfigVM_Task, 'duration_secs': 0.522302} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2098.882020] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Reconfigured VM instance instance-0000007b to attach disk [datastore1] 62417973-075e-4128-8eb5-4c62946856e7/62417973-075e-4128-8eb5-4c62946856e7.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2098.882318] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Updating instance '62417973-075e-4128-8eb5-4c62946856e7' progress to 50 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2099.163896] env[62510]: DEBUG nova.compute.manager [None req-c1b4cbb7-eb95-4b90-959b-4464c3aba173 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2099.164114] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c1b4cbb7-eb95-4b90-959b-4464c3aba173 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2099.164968] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-792586c6-831b-4d4d-b9d4-520b68806c44 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.173159] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1b4cbb7-eb95-4b90-959b-4464c3aba173 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2099.173437] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4a73ff34-00e6-4fb4-af18-be4689e0ca6e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.179824] env[62510]: DEBUG oslo_vmware.api [None req-c1b4cbb7-eb95-4b90-959b-4464c3aba173 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Waiting for the task: (returnval){ [ 2099.179824] env[62510]: value = "task-1769855" [ 2099.179824] env[62510]: _type = "Task" [ 2099.179824] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2099.188019] env[62510]: DEBUG oslo_vmware.api [None req-c1b4cbb7-eb95-4b90-959b-4464c3aba173 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769855, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2099.388785] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c5ef563-9356-4653-ad7d-8da870298834 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.407227] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-242008e0-3e45-425f-8b2d-b2494c5eed06 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.424053] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Updating instance '62417973-075e-4128-8eb5-4c62946856e7' progress to 67 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2099.689535] env[62510]: DEBUG oslo_vmware.api [None req-c1b4cbb7-eb95-4b90-959b-4464c3aba173 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769855, 'name': PowerOffVM_Task, 'duration_secs': 0.184703} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2099.689837] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1b4cbb7-eb95-4b90-959b-4464c3aba173 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2099.690023] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c1b4cbb7-eb95-4b90-959b-4464c3aba173 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2099.690270] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c5840d1e-b890-4e8f-9a40-d32f2f296bec {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.798018] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c1b4cbb7-eb95-4b90-959b-4464c3aba173 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2099.798308] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c1b4cbb7-eb95-4b90-959b-4464c3aba173 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2099.798435] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1b4cbb7-eb95-4b90-959b-4464c3aba173 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Deleting the datastore file [datastore1] 94dd7eee-f799-4fb5-854b-e7d59621b125 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2099.798700] env[62510]: 
DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3823cc0a-9c77-4efb-b83b-848df128ee2e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.804895] env[62510]: DEBUG oslo_vmware.api [None req-c1b4cbb7-eb95-4b90-959b-4464c3aba173 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Waiting for the task: (returnval){ [ 2099.804895] env[62510]: value = "task-1769857" [ 2099.804895] env[62510]: _type = "Task" [ 2099.804895] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2099.811962] env[62510]: DEBUG oslo_vmware.api [None req-c1b4cbb7-eb95-4b90-959b-4464c3aba173 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769857, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2099.964345] env[62510]: DEBUG nova.network.neutron [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Port 7afce004-8ac8-4715-a27d-7e5162c006ba binding to destination host cpu-1 is already ACTIVE {{(pid=62510) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2100.315024] env[62510]: DEBUG oslo_vmware.api [None req-c1b4cbb7-eb95-4b90-959b-4464c3aba173 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Task: {'id': task-1769857, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150674} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2100.315211] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1b4cbb7-eb95-4b90-959b-4464c3aba173 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2100.315388] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c1b4cbb7-eb95-4b90-959b-4464c3aba173 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2100.315562] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c1b4cbb7-eb95-4b90-959b-4464c3aba173 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2100.315736] env[62510]: INFO nova.compute.manager [None req-c1b4cbb7-eb95-4b90-959b-4464c3aba173 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Took 1.15 seconds to destroy the instance on the hypervisor. 
[ 2100.315971] env[62510]: DEBUG oslo.service.loopingcall [None req-c1b4cbb7-eb95-4b90-959b-4464c3aba173 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2100.316180] env[62510]: DEBUG nova.compute.manager [-] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2100.316298] env[62510]: DEBUG nova.network.neutron [-] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2100.808558] env[62510]: DEBUG nova.compute.manager [req-4a01bf4a-c2d9-438c-a166-9240b2f91f93 req-f85b453e-c38b-4f78-a18e-95259c964469 service nova] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Received event network-vif-deleted-12cbde53-7f97-41bf-818c-04b6c994d690 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2100.808847] env[62510]: INFO nova.compute.manager [req-4a01bf4a-c2d9-438c-a166-9240b2f91f93 req-f85b453e-c38b-4f78-a18e-95259c964469 service nova] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Neutron deleted interface 12cbde53-7f97-41bf-818c-04b6c994d690; detaching it from the instance and deleting it from the info cache [ 2100.808932] env[62510]: DEBUG nova.network.neutron [req-4a01bf4a-c2d9-438c-a166-9240b2f91f93 req-f85b453e-c38b-4f78-a18e-95259c964469 service nova] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2100.986856] env[62510]: DEBUG oslo_concurrency.lockutils [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "62417973-075e-4128-8eb5-4c62946856e7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2100.987110] env[62510]: DEBUG oslo_concurrency.lockutils [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "62417973-075e-4128-8eb5-4c62946856e7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2100.987285] env[62510]: DEBUG oslo_concurrency.lockutils [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "62417973-075e-4128-8eb5-4c62946856e7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2101.285293] env[62510]: DEBUG nova.network.neutron [-] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 
2101.311951] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c37af99b-ef0b-4026-b50b-8fde18f63b30 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.322125] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38498c9e-729d-4d09-91db-070b3584c4c3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.349484] env[62510]: DEBUG nova.compute.manager [req-4a01bf4a-c2d9-438c-a166-9240b2f91f93 req-f85b453e-c38b-4f78-a18e-95259c964469 service nova] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Detach interface failed, port_id=12cbde53-7f97-41bf-818c-04b6c994d690, reason: Instance 94dd7eee-f799-4fb5-854b-e7d59621b125 could not be found. {{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 2101.788038] env[62510]: INFO nova.compute.manager [-] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Took 1.47 seconds to deallocate network for instance. [ 2102.026886] env[62510]: DEBUG oslo_concurrency.lockutils [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "refresh_cache-62417973-075e-4128-8eb5-4c62946856e7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2102.027174] env[62510]: DEBUG oslo_concurrency.lockutils [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquired lock "refresh_cache-62417973-075e-4128-8eb5-4c62946856e7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2102.027346] env[62510]: DEBUG nova.network.neutron [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2102.294083] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c1b4cbb7-eb95-4b90-959b-4464c3aba173 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2102.294375] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c1b4cbb7-eb95-4b90-959b-4464c3aba173 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2102.294582] env[62510]: DEBUG nova.objects.instance [None req-c1b4cbb7-eb95-4b90-959b-4464c3aba173 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lazy-loading 'resources' on Instance uuid 94dd7eee-f799-4fb5-854b-e7d59621b125 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2102.500313] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6486ce23-3325-4d96-90b3-c105f67d20dd 
tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "9f3f72ba-60c9-48fb-917f-197e6fc8faef" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2102.500593] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "9f3f72ba-60c9-48fb-917f-197e6fc8faef" acquired by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2102.500755] env[62510]: INFO nova.compute.manager [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Shelving [ 2102.744963] env[62510]: DEBUG nova.network.neutron [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Updating instance_info_cache with network_info: [{"id": "7afce004-8ac8-4715-a27d-7e5162c006ba", "address": "fa:16:3e:b5:da:54", "network": {"id": "e49618de-aacc-4b42-8a2e-7e2dc945a3b1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-883053645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b13a257970e4a9a9f9cfecaaf37d9da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7afce004-8a", "ovs_interfaceid": "7afce004-8ac8-4715-a27d-7e5162c006ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2102.862006] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ae602b2-5d9c-412b-82b9-f8fc7aed2c6d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.869632] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3479b1f1-d5d2-40d4-bef1-19bef9fd0ddd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.899760] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc5c0403-cace-4fd0-a40f-8f45cd6231d5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.906460] env[62510]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-096cbd75-91de-489d-8d6f-88f5e55224df {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.919215] env[62510]: DEBUG nova.compute.provider_tree [None req-c1b4cbb7-eb95-4b90-959b-4464c3aba173 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2103.247988] env[62510]: DEBUG oslo_concurrency.lockutils [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Releasing lock "refresh_cache-62417973-075e-4128-8eb5-4c62946856e7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2103.422642] env[62510]: DEBUG nova.scheduler.client.report [None req-c1b4cbb7-eb95-4b90-959b-4464c3aba173 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2103.510016] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2103.510314] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4426766e-8dd6-42c6-b14f-f0d0e6184e5f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.518642] env[62510]: DEBUG oslo_vmware.api [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 2103.518642] env[62510]: value = "task-1769858" [ 2103.518642] env[62510]: _type = "Task" [ 2103.518642] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2103.526223] env[62510]: DEBUG oslo_vmware.api [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769858, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2103.768801] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ff4d572-27e6-4a1e-8f61-97d3f70c2439 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.787470] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0935767e-2610-46ec-a128-902ad2e2cd33 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.794308] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Updating instance '62417973-075e-4128-8eb5-4c62946856e7' progress to 83 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2103.927513] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c1b4cbb7-eb95-4b90-959b-4464c3aba173 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.633s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2103.947486] env[62510]: INFO nova.scheduler.client.report [None req-c1b4cbb7-eb95-4b90-959b-4464c3aba173 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Deleted allocations for instance 94dd7eee-f799-4fb5-854b-e7d59621b125 [ 2104.028631] env[62510]: DEBUG oslo_vmware.api [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769858, 'name': PowerOffVM_Task, 'duration_secs': 0.196934} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2104.028880] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2104.029634] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03de18b3-e7ed-41e6-b5c6-8c0c5069b630 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.048553] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5506fcf6-33ac-4c22-8d7d-eb7722ba08aa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.300223] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2104.300527] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f7554012-f01a-4220-815e-bc473e4bd253 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.308081] env[62510]: DEBUG oslo_vmware.api [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2104.308081] env[62510]: value = "task-1769859" [ 2104.308081] env[62510]: _type = "Task" [ 2104.308081] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2104.315764] env[62510]: DEBUG oslo_vmware.api [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769859, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2104.456714] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c1b4cbb7-eb95-4b90-959b-4464c3aba173 tempest-AttachVolumeTestJSON-303590859 tempest-AttachVolumeTestJSON-303590859-project-member] Lock "94dd7eee-f799-4fb5-854b-e7d59621b125" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.802s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2104.560719] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Creating Snapshot of the VM instance {{(pid=62510) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2104.561024] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-4f408f5b-1328-47be-8283-a150e2a37bec {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.569049] env[62510]: DEBUG oslo_vmware.api [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 2104.569049] env[62510]: value = "task-1769860" [ 2104.569049] env[62510]: _type = "Task" [ 2104.569049] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2104.577170] env[62510]: DEBUG oslo_vmware.api [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769860, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2104.818456] env[62510]: DEBUG oslo_vmware.api [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769859, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2105.079459] env[62510]: DEBUG oslo_vmware.api [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769860, 'name': CreateSnapshot_Task, 'duration_secs': 0.497861} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2105.079459] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Created Snapshot of the VM instance {{(pid=62510) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2105.080079] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d08aa1a2-c94b-429f-ba03-19aec8ea4889 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.318672] env[62510]: DEBUG oslo_vmware.api [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769859, 'name': PowerOnVM_Task, 'duration_secs': 0.51494} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2105.318961] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2105.319132] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-763f794c-f3d4-4650-8bc3-7a048e571637 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Updating instance '62417973-075e-4128-8eb5-4c62946856e7' progress to 100 {{(pid=62510) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2105.597857] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Creating linked-clone VM from snapshot {{(pid=62510) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2105.598184] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-f794a520-af96-4265-a4b7-370f85a9e58e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.606773] env[62510]: DEBUG oslo_vmware.api [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 2105.606773] env[62510]: value = "task-1769862" [ 2105.606773] env[62510]: _type = "Task" [ 2105.606773] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2105.615225] env[62510]: DEBUG oslo_vmware.api [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769862, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2106.116598] env[62510]: DEBUG oslo_vmware.api [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769862, 'name': CloneVM_Task} progress is 94%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2106.618364] env[62510]: DEBUG oslo_vmware.api [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769862, 'name': CloneVM_Task} progress is 94%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2107.118412] env[62510]: DEBUG oslo_vmware.api [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769862, 'name': CloneVM_Task, 'duration_secs': 1.419179} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2107.118717] env[62510]: INFO nova.virt.vmwareapi.vmops [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Created linked-clone VM from snapshot [ 2107.119629] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40736bdb-52f2-45e5-979d-49bcfa1cabcd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.127257] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Uploading image aaa462ff-6524-46df-a138-b1e43c03f689 {{(pid=62510) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2107.152678] env[62510]: DEBUG oslo_vmware.rw_handles [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2107.152678] env[62510]: value = "vm-367520" [ 2107.152678] env[62510]: _type = "VirtualMachine" [ 2107.152678] env[62510]: }. 
{{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2107.152948] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-d954a9a8-8bbe-44a2-b920-75dcc6d475a1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.160439] env[62510]: DEBUG oslo_vmware.rw_handles [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lease: (returnval){ [ 2107.160439] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52cebc4c-5df6-786f-6d4c-8c16d38fbcbc" [ 2107.160439] env[62510]: _type = "HttpNfcLease" [ 2107.160439] env[62510]: } obtained for exporting VM: (result){ [ 2107.160439] env[62510]: value = "vm-367520" [ 2107.160439] env[62510]: _type = "VirtualMachine" [ 2107.160439] env[62510]: }. {{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2107.160831] env[62510]: DEBUG oslo_vmware.api [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the lease: (returnval){ [ 2107.160831] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52cebc4c-5df6-786f-6d4c-8c16d38fbcbc" [ 2107.160831] env[62510]: _type = "HttpNfcLease" [ 2107.160831] env[62510]: } to be ready. {{(pid=62510) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2107.167284] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2107.167284] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52cebc4c-5df6-786f-6d4c-8c16d38fbcbc" [ 2107.167284] env[62510]: _type = "HttpNfcLease" [ 2107.167284] env[62510]: } is initializing. {{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2107.669661] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2107.669661] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52cebc4c-5df6-786f-6d4c-8c16d38fbcbc" [ 2107.669661] env[62510]: _type = "HttpNfcLease" [ 2107.669661] env[62510]: } is ready. {{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2107.670135] env[62510]: DEBUG oslo_vmware.rw_handles [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2107.670135] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52cebc4c-5df6-786f-6d4c-8c16d38fbcbc" [ 2107.670135] env[62510]: _type = "HttpNfcLease" [ 2107.670135] env[62510]: }. 
{{(pid=62510) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2107.670854] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91ff4097-f6d0-4f94-8a1d-5984fcf9a77a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.678310] env[62510]: DEBUG oslo_vmware.rw_handles [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52982ad3-9446-2968-3f4a-f93aff805308/disk-0.vmdk from lease info. {{(pid=62510) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2107.678495] env[62510]: DEBUG oslo_vmware.rw_handles [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52982ad3-9446-2968-3f4a-f93aff805308/disk-0.vmdk for reading. {{(pid=62510) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2107.767695] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-2f5bf31f-470a-4533-9ca0-cf8f1f8bd5ec {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.137583] env[62510]: DEBUG nova.network.neutron [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Port 7afce004-8ac8-4715-a27d-7e5162c006ba binding to destination host cpu-1 is already ACTIVE {{(pid=62510) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2108.137851] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "refresh_cache-62417973-075e-4128-8eb5-4c62946856e7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2108.138120] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquired lock "refresh_cache-62417973-075e-4128-8eb5-4c62946856e7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2108.138390] env[62510]: DEBUG nova.network.neutron [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2109.170381] env[62510]: DEBUG nova.network.neutron [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Updating instance_info_cache with network_info: [{"id": "7afce004-8ac8-4715-a27d-7e5162c006ba", "address": "fa:16:3e:b5:da:54", "network": {"id": "e49618de-aacc-4b42-8a2e-7e2dc945a3b1", "bridge": "br-int", 
"label": "tempest-ServerActionsTestJSON-883053645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b13a257970e4a9a9f9cfecaaf37d9da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7afce004-8a", "ovs_interfaceid": "7afce004-8ac8-4715-a27d-7e5162c006ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2109.674079] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Releasing lock "refresh_cache-62417973-075e-4128-8eb5-4c62946856e7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2110.177136] env[62510]: DEBUG nova.compute.manager [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62510) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 2110.177449] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2110.177610] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2110.681512] env[62510]: DEBUG nova.objects.instance [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lazy-loading 'migration_context' on Instance uuid 62417973-075e-4128-8eb5-4c62946856e7 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2111.252173] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e55776a-abf6-422e-b59a-76929e8e8012 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.259704] env[62510]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0adbea23-ed22-429e-9261-0eadc4005174 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.291661] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c513a36-23c6-444d-a649-57042982ca88 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.299364] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d5c4dd6-d8cd-4710-8e9a-cb53c281ff7b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.312977] env[62510]: DEBUG nova.compute.provider_tree [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2111.816282] env[62510]: DEBUG nova.scheduler.client.report [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2112.828707] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.651s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2114.368524] env[62510]: INFO nova.compute.manager [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Swapping old allocation on dict_keys(['c3653102-341b-4ed1-8b1f-1abaf8aa3e56']) held by migration 5466af3c-25cd-473d-96df-5f910ce643d7 for instance [ 2114.391173] env[62510]: DEBUG nova.scheduler.client.report [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Overwriting current allocation {'allocations': {'c3653102-341b-4ed1-8b1f-1abaf8aa3e56': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 184}}, 'project_id': '5b13a257970e4a9a9f9cfecaaf37d9da', 'user_id': 'e483d7dc32804985bc9af5128670131b', 'consumer_generation': 1} on consumer 62417973-075e-4128-8eb5-4c62946856e7 {{(pid=62510) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2033}} [ 2114.468967] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] 
Acquiring lock "refresh_cache-62417973-075e-4128-8eb5-4c62946856e7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2114.469172] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquired lock "refresh_cache-62417973-075e-4128-8eb5-4c62946856e7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2114.469354] env[62510]: DEBUG nova.network.neutron [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2114.687072] env[62510]: DEBUG oslo_vmware.rw_handles [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52982ad3-9446-2968-3f4a-f93aff805308/disk-0.vmdk. {{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2114.688082] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52237a37-fe29-4735-bbc0-6295e121f924 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.694625] env[62510]: DEBUG oslo_vmware.rw_handles [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52982ad3-9446-2968-3f4a-f93aff805308/disk-0.vmdk is in state: ready. {{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2114.694852] env[62510]: ERROR oslo_vmware.rw_handles [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52982ad3-9446-2968-3f4a-f93aff805308/disk-0.vmdk due to incomplete transfer. [ 2114.695130] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-2345eb7b-6443-4374-8a4c-5ddecd754a6a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.703010] env[62510]: DEBUG oslo_vmware.rw_handles [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52982ad3-9446-2968-3f4a-f93aff805308/disk-0.vmdk. 
{{(pid=62510) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2114.703257] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Uploaded image aaa462ff-6524-46df-a138-b1e43c03f689 to the Glance image server {{(pid=62510) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2114.705418] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Destroying the VM {{(pid=62510) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2114.705688] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-f5d4c546-785d-4900-9537-62cc06220f64 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.711550] env[62510]: DEBUG oslo_vmware.api [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 2114.711550] env[62510]: value = "task-1769866" [ 2114.711550] env[62510]: _type = "Task" [ 2114.711550] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2114.719447] env[62510]: DEBUG oslo_vmware.api [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769866, 'name': Destroy_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2115.220792] env[62510]: DEBUG oslo_vmware.api [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769866, 'name': Destroy_Task, 'duration_secs': 0.317045} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2115.221073] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Destroyed the VM [ 2115.221358] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Deleting Snapshot of the VM instance {{(pid=62510) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2115.221599] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-efe70c6e-65f5-4198-9ff9-b36485e7c967 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.228145] env[62510]: DEBUG oslo_vmware.api [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 2115.228145] env[62510]: value = "task-1769867" [ 2115.228145] env[62510]: _type = "Task" [ 2115.228145] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2115.235328] env[62510]: DEBUG oslo_vmware.api [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769867, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2115.315207] env[62510]: DEBUG nova.network.neutron [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Updating instance_info_cache with network_info: [{"id": "7afce004-8ac8-4715-a27d-7e5162c006ba", "address": "fa:16:3e:b5:da:54", "network": {"id": "e49618de-aacc-4b42-8a2e-7e2dc945a3b1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-883053645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b13a257970e4a9a9f9cfecaaf37d9da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7afce004-8a", "ovs_interfaceid": "7afce004-8ac8-4715-a27d-7e5162c006ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2115.739536] env[62510]: DEBUG 
oslo_vmware.api [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769867, 'name': RemoveSnapshot_Task, 'duration_secs': 0.339799} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2115.739536] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Deleted Snapshot of the VM instance {{(pid=62510) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2115.739536] env[62510]: DEBUG nova.compute.manager [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2115.739935] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40e7b988-27d1-4d44-8f77-cd3b386901d6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.817844] env[62510]: DEBUG oslo_concurrency.lockutils [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Releasing lock "refresh_cache-62417973-075e-4128-8eb5-4c62946856e7" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2115.819044] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2115.819044] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d24b6873-c712-4430-af21-287b6c62185a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.826410] env[62510]: DEBUG oslo_vmware.api [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2115.826410] env[62510]: value = "task-1769868" [ 2115.826410] env[62510]: _type = "Task" [ 2115.826410] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2115.835696] env[62510]: DEBUG oslo_vmware.api [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769868, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2116.206791] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2116.252232] env[62510]: INFO nova.compute.manager [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Shelve offloading [ 2116.336320] env[62510]: DEBUG oslo_vmware.api [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769868, 'name': PowerOffVM_Task, 'duration_secs': 0.246974} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2116.336507] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2116.337209] env[62510]: DEBUG nova.virt.hardware [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2116.337423] env[62510]: DEBUG nova.virt.hardware [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2116.337579] env[62510]: DEBUG nova.virt.hardware [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2116.337760] env[62510]: DEBUG nova.virt.hardware [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2116.337909] env[62510]: DEBUG nova.virt.hardware [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2116.338230] 
env[62510]: DEBUG nova.virt.hardware [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2116.338306] env[62510]: DEBUG nova.virt.hardware [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2116.338450] env[62510]: DEBUG nova.virt.hardware [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2116.338669] env[62510]: DEBUG nova.virt.hardware [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2116.338768] env[62510]: DEBUG nova.virt.hardware [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2116.338942] env[62510]: DEBUG nova.virt.hardware [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2116.344128] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-910b0ee8-9bc2-450e-8414-088e8f89cea4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.360920] env[62510]: DEBUG oslo_vmware.api [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2116.360920] env[62510]: value = "task-1769869" [ 2116.360920] env[62510]: _type = "Task" [ 2116.360920] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2116.369053] env[62510]: DEBUG oslo_vmware.api [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769869, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2116.755943] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2116.756350] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ff0f8bf7-7dc6-4ed4-9df7-cb245ed1e8ce {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.764195] env[62510]: DEBUG oslo_vmware.api [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 2116.764195] env[62510]: value = "task-1769870" [ 2116.764195] env[62510]: _type = "Task" [ 2116.764195] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2116.772910] env[62510]: DEBUG oslo_vmware.api [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769870, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2116.871045] env[62510]: DEBUG oslo_vmware.api [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769869, 'name': ReconfigVM_Task, 'duration_secs': 0.183869} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2116.871865] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e61b0d9-b5b4-4c64-b1b7-16061f87ccca {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.890853] env[62510]: DEBUG nova.virt.hardware [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2116.891160] env[62510]: DEBUG nova.virt.hardware [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2116.891373] env[62510]: DEBUG nova.virt.hardware [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2116.891609] env[62510]: DEBUG nova.virt.hardware [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2116.891794] env[62510]: DEBUG nova.virt.hardware [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2116.891978] env[62510]: DEBUG nova.virt.hardware [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2116.892241] env[62510]: DEBUG nova.virt.hardware [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2116.892444] env[62510]: DEBUG nova.virt.hardware [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 2116.892666] env[62510]: DEBUG nova.virt.hardware [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2116.893088] env[62510]: DEBUG nova.virt.hardware [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2116.893184] env[62510]: DEBUG nova.virt.hardware [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2116.893954] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50ede48f-bac0-49f5-9038-6c4fcd1d2adf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.899810] env[62510]: DEBUG oslo_vmware.api [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2116.899810] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5200b7f3-c0f1-7484-e84c-aa148f977bc0" [ 2116.899810] env[62510]: _type = "Task" [ 2116.899810] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2116.908289] env[62510]: DEBUG oslo_vmware.api [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5200b7f3-c0f1-7484-e84c-aa148f977bc0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.274863] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] VM already powered off {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2117.275064] env[62510]: DEBUG nova.compute.manager [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2117.275792] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baeb1494-cfe6-446b-bf19-1d1e90f35361 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.281527] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "refresh_cache-9f3f72ba-60c9-48fb-917f-197e6fc8faef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2117.281690] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquired lock "refresh_cache-9f3f72ba-60c9-48fb-917f-197e6fc8faef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2117.281860] env[62510]: DEBUG nova.network.neutron [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2117.410919] env[62510]: DEBUG oslo_vmware.api [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]5200b7f3-c0f1-7484-e84c-aa148f977bc0, 'name': SearchDatastore_Task, 'duration_secs': 0.02014} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2117.416284] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Reconfiguring VM instance instance-0000007b to detach disk 2000 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2117.416557] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d2b2bd5-9a52-4108-91b4-bd4933d13ad6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.435980] env[62510]: DEBUG oslo_vmware.api [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2117.435980] env[62510]: value = "task-1769871" [ 2117.435980] env[62510]: _type = "Task" [ 2117.435980] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2117.443866] env[62510]: DEBUG oslo_vmware.api [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769871, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.946499] env[62510]: DEBUG oslo_vmware.api [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769871, 'name': ReconfigVM_Task, 'duration_secs': 0.178167} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2117.946868] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Reconfigured VM instance instance-0000007b to detach disk 2000 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2117.947555] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-643db866-fa64-4135-a021-de28a7e54fd6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.968467] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] 62417973-075e-4128-8eb5-4c62946856e7/62417973-075e-4128-8eb5-4c62946856e7.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2117.970561] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-58a9f576-21a8-4932-b234-efe4bb75f27c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.988417] env[62510]: DEBUG oslo_vmware.api [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2117.988417] env[62510]: value = "task-1769872" [ 2117.988417] env[62510]: _type = "Task" [ 2117.988417] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2117.997455] env[62510]: DEBUG oslo_vmware.api [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769872, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2118.016230] env[62510]: DEBUG nova.network.neutron [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Updating instance_info_cache with network_info: [{"id": "47759f10-ede2-4020-b8a8-36effea384c5", "address": "fa:16:3e:e0:5c:8d", "network": {"id": "4c55d05c-607e-4972-898f-4aacefeddfdb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1391357384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bae4f0adee8c4c28add1849316448538", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dced2f3d-7fd3-4a42-836d-9f02dab4c949", "external-id": "nsx-vlan-transportzone-117", "segmentation_id": 117, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47759f10-ed", "ovs_interfaceid": "47759f10-ede2-4020-b8a8-36effea384c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2118.499057] env[62510]: DEBUG oslo_vmware.api [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769872, 'name': ReconfigVM_Task, 'duration_secs': 0.242504} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2118.499057] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Reconfigured VM instance instance-0000007b to attach disk [datastore1] 62417973-075e-4128-8eb5-4c62946856e7/62417973-075e-4128-8eb5-4c62946856e7.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2118.499627] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90197b36-968d-4f5c-81f0-7c42ba5aa980 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.517353] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b93cd61d-f54e-4a31-abfb-d0c69ea58e6a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.519857] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Releasing lock "refresh_cache-9f3f72ba-60c9-48fb-917f-197e6fc8faef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2118.537148] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-595de2ee-0da2-4ffc-b6de-3b5a648d3098 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.554905] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57de1b08-09e4-4ff1-a0f8-dae9e0014d86 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.561324] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2118.561552] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3184cfc7-5a4c-400f-bd03-47179f5209a5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.566975] env[62510]: DEBUG oslo_vmware.api [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2118.566975] env[62510]: value = "task-1769873" [ 2118.566975] env[62510]: _type = "Task" [ 2118.566975] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2118.574870] env[62510]: DEBUG oslo_vmware.api [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769873, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2118.762273] env[62510]: DEBUG nova.compute.manager [req-56713ac5-839c-4d1e-a25d-cb3d3dcd1e21 req-ecc6abbe-cb68-4f21-a7b9-054debab5d86 service nova] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Received event network-vif-unplugged-47759f10-ede2-4020-b8a8-36effea384c5 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2118.762536] env[62510]: DEBUG oslo_concurrency.lockutils [req-56713ac5-839c-4d1e-a25d-cb3d3dcd1e21 req-ecc6abbe-cb68-4f21-a7b9-054debab5d86 service nova] Acquiring lock "9f3f72ba-60c9-48fb-917f-197e6fc8faef-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2118.762749] env[62510]: DEBUG oslo_concurrency.lockutils [req-56713ac5-839c-4d1e-a25d-cb3d3dcd1e21 req-ecc6abbe-cb68-4f21-a7b9-054debab5d86 service nova] Lock "9f3f72ba-60c9-48fb-917f-197e6fc8faef-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2118.762915] env[62510]: DEBUG oslo_concurrency.lockutils [req-56713ac5-839c-4d1e-a25d-cb3d3dcd1e21 req-ecc6abbe-cb68-4f21-a7b9-054debab5d86 service nova] Lock "9f3f72ba-60c9-48fb-917f-197e6fc8faef-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2118.763097] env[62510]: DEBUG nova.compute.manager [req-56713ac5-839c-4d1e-a25d-cb3d3dcd1e21 req-ecc6abbe-cb68-4f21-a7b9-054debab5d86 service nova] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] No waiting events found dispatching network-vif-unplugged-47759f10-ede2-4020-b8a8-36effea384c5 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2118.763268] env[62510]: WARNING nova.compute.manager [req-56713ac5-839c-4d1e-a25d-cb3d3dcd1e21 req-ecc6abbe-cb68-4f21-a7b9-054debab5d86 service nova] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Received unexpected event network-vif-unplugged-47759f10-ede2-4020-b8a8-36effea384c5 for instance with vm_state shelved and task_state shelving_offloading. 
[ 2118.852772] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2118.853826] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dcfd3ba-d302-4122-94d1-fb6c43110e9e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.862517] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2118.862759] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-250fe325-1aa8-4877-a5d5-4a752fa901a0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.015016] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2119.015476] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2119.015476] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Deleting the datastore file [datastore1] 9f3f72ba-60c9-48fb-917f-197e6fc8faef {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2119.015749] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f8c779d1-61aa-4c99-8ed7-90b85302f800 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.022540] env[62510]: DEBUG oslo_vmware.api [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 2119.022540] env[62510]: value = "task-1769875" [ 2119.022540] env[62510]: _type = "Task" [ 2119.022540] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2119.030324] env[62510]: DEBUG oslo_vmware.api [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769875, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2119.076137] env[62510]: DEBUG oslo_vmware.api [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769873, 'name': PowerOnVM_Task, 'duration_secs': 0.351567} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2119.076386] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2119.532895] env[62510]: DEBUG oslo_vmware.api [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769875, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139737} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2119.533095] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2119.533284] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2119.533457] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2119.554413] env[62510]: INFO nova.scheduler.client.report [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Deleted allocations for instance 9f3f72ba-60c9-48fb-917f-197e6fc8faef [ 2120.058931] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2120.059325] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2120.059456] env[62510]: DEBUG nova.objects.instance [None req-6486ce23-3325-4d96-90b3-c105f67d20dd 
tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lazy-loading 'resources' on Instance uuid 9f3f72ba-60c9-48fb-917f-197e6fc8faef {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2120.086610] env[62510]: INFO nova.compute.manager [None req-5ef047c8-684b-4956-a14a-0bd4bb94444a tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Updating instance to original state: 'active' [ 2120.562430] env[62510]: DEBUG nova.objects.instance [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lazy-loading 'numa_topology' on Instance uuid 9f3f72ba-60c9-48fb-917f-197e6fc8faef {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2120.803606] env[62510]: DEBUG nova.compute.manager [req-7134d71d-4bdd-4a75-96fa-9edabe3f041e req-23b3acfa-1e12-4588-bb09-34666425157b service nova] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Received event network-changed-47759f10-ede2-4020-b8a8-36effea384c5 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2120.803835] env[62510]: DEBUG nova.compute.manager [req-7134d71d-4bdd-4a75-96fa-9edabe3f041e req-23b3acfa-1e12-4588-bb09-34666425157b service nova] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Refreshing instance network info cache due to event network-changed-47759f10-ede2-4020-b8a8-36effea384c5. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 2120.804479] env[62510]: DEBUG oslo_concurrency.lockutils [req-7134d71d-4bdd-4a75-96fa-9edabe3f041e req-23b3acfa-1e12-4588-bb09-34666425157b service nova] Acquiring lock "refresh_cache-9f3f72ba-60c9-48fb-917f-197e6fc8faef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2120.804782] env[62510]: DEBUG oslo_concurrency.lockutils [req-7134d71d-4bdd-4a75-96fa-9edabe3f041e req-23b3acfa-1e12-4588-bb09-34666425157b service nova] Acquired lock "refresh_cache-9f3f72ba-60c9-48fb-917f-197e6fc8faef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2120.805022] env[62510]: DEBUG nova.network.neutron [req-7134d71d-4bdd-4a75-96fa-9edabe3f041e req-23b3acfa-1e12-4588-bb09-34666425157b service nova] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Refreshing network info cache for port 47759f10-ede2-4020-b8a8-36effea384c5 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2121.064518] env[62510]: DEBUG nova.objects.base [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Object Instance<9f3f72ba-60c9-48fb-917f-197e6fc8faef> lazy-loaded attributes: resources,numa_topology {{(pid=62510) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2121.111178] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78106fc0-88d1-4c63-a440-d0e8d49f1d8b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.119840] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8089d3c-3a1b-4e52-8036-d3ec859bf7cc {{(pid=62510) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.151271] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-931d9c80-b6e0-48a8-b910-d770bfc03ffd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.158811] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-521bdc92-e36c-42b0-8057-7b1d16d053a2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.172640] env[62510]: DEBUG nova.compute.provider_tree [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2121.223438] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c71eca6b-89b6-4603-97ab-321f7be53d0c tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "62417973-075e-4128-8eb5-4c62946856e7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2121.223701] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c71eca6b-89b6-4603-97ab-321f7be53d0c tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "62417973-075e-4128-8eb5-4c62946856e7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2121.223913] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c71eca6b-89b6-4603-97ab-321f7be53d0c tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "62417973-075e-4128-8eb5-4c62946856e7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2121.224119] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c71eca6b-89b6-4603-97ab-321f7be53d0c tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "62417973-075e-4128-8eb5-4c62946856e7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2121.224294] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c71eca6b-89b6-4603-97ab-321f7be53d0c tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "62417973-075e-4128-8eb5-4c62946856e7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2121.226496] env[62510]: INFO nova.compute.manager [None req-c71eca6b-89b6-4603-97ab-321f7be53d0c tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] 
Terminating instance [ 2121.554816] env[62510]: DEBUG nova.network.neutron [req-7134d71d-4bdd-4a75-96fa-9edabe3f041e req-23b3acfa-1e12-4588-bb09-34666425157b service nova] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Updated VIF entry in instance network info cache for port 47759f10-ede2-4020-b8a8-36effea384c5. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2121.555195] env[62510]: DEBUG nova.network.neutron [req-7134d71d-4bdd-4a75-96fa-9edabe3f041e req-23b3acfa-1e12-4588-bb09-34666425157b service nova] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Updating instance_info_cache with network_info: [{"id": "47759f10-ede2-4020-b8a8-36effea384c5", "address": "fa:16:3e:e0:5c:8d", "network": {"id": "4c55d05c-607e-4972-898f-4aacefeddfdb", "bridge": null, "label": "tempest-ServerActionsTestOtherB-1391357384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bae4f0adee8c4c28add1849316448538", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap47759f10-ed", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2121.675588] env[62510]: DEBUG nova.scheduler.client.report [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2121.730563] env[62510]: DEBUG nova.compute.manager [None req-c71eca6b-89b6-4603-97ab-321f7be53d0c tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Start destroying the instance on the hypervisor. 
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2121.730817] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c71eca6b-89b6-4603-97ab-321f7be53d0c tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2121.731745] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98670c15-fb47-4075-9243-69fde741afcb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.740216] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-c71eca6b-89b6-4603-97ab-321f7be53d0c tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2121.740528] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-52bdbf00-5cfb-4754-afd5-4d2cce662573 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.747269] env[62510]: DEBUG oslo_vmware.api [None req-c71eca6b-89b6-4603-97ab-321f7be53d0c tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2121.747269] env[62510]: value = "task-1769876" [ 2121.747269] env[62510]: _type = "Task" [ 2121.747269] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2121.755920] env[62510]: DEBUG oslo_vmware.api [None req-c71eca6b-89b6-4603-97ab-321f7be53d0c tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769876, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2121.801981] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "9f3f72ba-60c9-48fb-917f-197e6fc8faef" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2122.058721] env[62510]: DEBUG oslo_concurrency.lockutils [req-7134d71d-4bdd-4a75-96fa-9edabe3f041e req-23b3acfa-1e12-4588-bb09-34666425157b service nova] Releasing lock "refresh_cache-9f3f72ba-60c9-48fb-917f-197e6fc8faef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2122.181909] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.122s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2122.257197] env[62510]: DEBUG oslo_vmware.api [None req-c71eca6b-89b6-4603-97ab-321f7be53d0c tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769876, 'name': PowerOffVM_Task, 'duration_secs': 0.212508} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2122.257422] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-c71eca6b-89b6-4603-97ab-321f7be53d0c tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2122.257593] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c71eca6b-89b6-4603-97ab-321f7be53d0c tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2122.257838] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cca25e03-b0b7-49b8-940f-2507cdeafa9e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.326992] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c71eca6b-89b6-4603-97ab-321f7be53d0c tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2122.327248] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c71eca6b-89b6-4603-97ab-321f7be53d0c tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2122.327445] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-c71eca6b-89b6-4603-97ab-321f7be53d0c 
tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Deleting the datastore file [datastore1] 62417973-075e-4128-8eb5-4c62946856e7 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2122.327717] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4cfa70cd-7d79-4b06-b053-781544197aac {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.334912] env[62510]: DEBUG oslo_vmware.api [None req-c71eca6b-89b6-4603-97ab-321f7be53d0c tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2122.334912] env[62510]: value = "task-1769878" [ 2122.334912] env[62510]: _type = "Task" [ 2122.334912] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2122.344072] env[62510]: DEBUG oslo_vmware.api [None req-c71eca6b-89b6-4603-97ab-321f7be53d0c tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769878, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2122.689592] env[62510]: DEBUG oslo_concurrency.lockutils [None req-6486ce23-3325-4d96-90b3-c105f67d20dd tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "9f3f72ba-60c9-48fb-917f-197e6fc8faef" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 20.189s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2122.690490] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "9f3f72ba-60c9-48fb-917f-197e6fc8faef" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.889s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2122.690696] env[62510]: INFO nova.compute.manager [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Unshelving [ 2122.845313] env[62510]: DEBUG oslo_vmware.api [None req-c71eca6b-89b6-4603-97ab-321f7be53d0c tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769878, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140545} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2122.845538] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-c71eca6b-89b6-4603-97ab-321f7be53d0c tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2122.845721] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c71eca6b-89b6-4603-97ab-321f7be53d0c tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2122.845896] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-c71eca6b-89b6-4603-97ab-321f7be53d0c tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2122.846079] env[62510]: INFO nova.compute.manager [None req-c71eca6b-89b6-4603-97ab-321f7be53d0c tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Took 1.12 seconds to destroy the instance on the hypervisor. [ 2122.846353] env[62510]: DEBUG oslo.service.loopingcall [None req-c71eca6b-89b6-4603-97ab-321f7be53d0c tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2122.846558] env[62510]: DEBUG nova.compute.manager [-] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2122.846657] env[62510]: DEBUG nova.network.neutron [-] [instance: 62417973-075e-4128-8eb5-4c62946856e7] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2123.206818] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2123.207052] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62510) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 2123.336996] env[62510]: DEBUG nova.compute.manager [req-f8141a0c-b778-4cb5-a469-f2e3c0c1d0d5 req-c8ddd2f7-abc8-4d9a-b790-750bdf4eac94 service nova] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Received event network-vif-deleted-7afce004-8ac8-4715-a27d-7e5162c006ba {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2123.337216] env[62510]: INFO nova.compute.manager [req-f8141a0c-b778-4cb5-a469-f2e3c0c1d0d5 req-c8ddd2f7-abc8-4d9a-b790-750bdf4eac94 service nova] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Neutron deleted interface 7afce004-8ac8-4715-a27d-7e5162c006ba; detaching it from the instance and deleting it from the info cache [ 2123.337377] env[62510]: DEBUG nova.network.neutron [req-f8141a0c-b778-4cb5-a469-f2e3c0c1d0d5 req-c8ddd2f7-abc8-4d9a-b790-750bdf4eac94 service nova] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2123.714522] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2123.714711] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2123.714925] env[62510]: DEBUG nova.objects.instance [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lazy-loading 'pci_requests' on Instance uuid 9f3f72ba-60c9-48fb-917f-197e6fc8faef {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2123.816894] env[62510]: DEBUG nova.network.neutron [-] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2123.839488] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9e5e52a0-f38a-435b-938b-955ab0b8d76c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.850037] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afeb3eea-7320-49bb-8f82-667c3b9297c9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.874928] env[62510]: DEBUG nova.compute.manager [req-f8141a0c-b778-4cb5-a469-f2e3c0c1d0d5 req-c8ddd2f7-abc8-4d9a-b790-750bdf4eac94 service nova] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Detach interface failed, port_id=7afce004-8ac8-4715-a27d-7e5162c006ba, reason: Instance 62417973-075e-4128-8eb5-4c62946856e7 could not be found. 
{{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 2124.202812] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2124.218924] env[62510]: DEBUG nova.objects.instance [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lazy-loading 'numa_topology' on Instance uuid 9f3f72ba-60c9-48fb-917f-197e6fc8faef {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2124.319659] env[62510]: INFO nova.compute.manager [-] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Took 1.47 seconds to deallocate network for instance. [ 2124.721388] env[62510]: INFO nova.compute.claims [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2124.826062] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c71eca6b-89b6-4603-97ab-321f7be53d0c tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2125.207442] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2125.777719] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1771866a-0fee-40ae-92f0-2bef0158d300 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.785833] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb77aa39-4477-444d-bd60-51aeb27aee76 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.815405] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afbfdd80-e067-456c-ae8f-e9a7b74c6652 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.822693] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f476d94f-f712-45b7-80b8-c861f23281be {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.835652] env[62510]: DEBUG nova.compute.provider_tree [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2126.207369] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running 
periodic task ComputeManager._poll_rescued_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2126.207631] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2126.338636] env[62510]: DEBUG nova.scheduler.client.report [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2126.710555] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2126.843865] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.129s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2126.846133] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c71eca6b-89b6-4603-97ab-321f7be53d0c tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.020s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2126.846357] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c71eca6b-89b6-4603-97ab-321f7be53d0c tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2126.848092] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.138s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2126.848271] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
2126.848431] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62510) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2126.849653] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b80f7159-5669-471f-aff1-35f9dde49ac8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.858160] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f91d0986-6039-4a83-bb1c-02cb7652dc24 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.874671] env[62510]: INFO nova.scheduler.client.report [None req-c71eca6b-89b6-4603-97ab-321f7be53d0c tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Deleted allocations for instance 62417973-075e-4128-8eb5-4c62946856e7 [ 2126.876388] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c160bbf9-6fdd-4376-89b4-a36d37f8c2cc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.881574] env[62510]: INFO nova.network.neutron [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Updating port 47759f10-ede2-4020-b8a8-36effea384c5 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 2126.888448] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6281057f-8bb3-40c2-ac6d-88f509228ec8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.918241] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180963MB free_disk=166GB free_vcpus=48 pci_devices=None {{(pid=62510) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2126.918400] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2126.918579] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2127.387109] env[62510]: DEBUG oslo_concurrency.lockutils [None req-c71eca6b-89b6-4603-97ab-321f7be53d0c tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "62417973-075e-4128-8eb5-4c62946856e7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.163s {{(pid=62510) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2127.939776] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance ef20eba1-cb3a-4e0f-bbdb-54949e409546 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2127.940238] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 9f3f72ba-60c9-48fb-917f-197e6fc8faef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2128.281166] env[62510]: DEBUG nova.compute.manager [req-052341d3-5ee3-49aa-8384-3bfe228d14a3 req-b5aa22a1-bc94-4f14-8d13-cdecb2474daa service nova] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Received event network-vif-plugged-47759f10-ede2-4020-b8a8-36effea384c5 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2128.281166] env[62510]: DEBUG oslo_concurrency.lockutils [req-052341d3-5ee3-49aa-8384-3bfe228d14a3 req-b5aa22a1-bc94-4f14-8d13-cdecb2474daa service nova] Acquiring lock "9f3f72ba-60c9-48fb-917f-197e6fc8faef-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2128.281166] env[62510]: DEBUG oslo_concurrency.lockutils [req-052341d3-5ee3-49aa-8384-3bfe228d14a3 req-b5aa22a1-bc94-4f14-8d13-cdecb2474daa service nova] Lock "9f3f72ba-60c9-48fb-917f-197e6fc8faef-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2128.281166] env[62510]: DEBUG oslo_concurrency.lockutils [req-052341d3-5ee3-49aa-8384-3bfe228d14a3 req-b5aa22a1-bc94-4f14-8d13-cdecb2474daa service nova] Lock "9f3f72ba-60c9-48fb-917f-197e6fc8faef-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2128.281732] env[62510]: DEBUG nova.compute.manager [req-052341d3-5ee3-49aa-8384-3bfe228d14a3 req-b5aa22a1-bc94-4f14-8d13-cdecb2474daa service nova] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] No waiting events found dispatching network-vif-plugged-47759f10-ede2-4020-b8a8-36effea384c5 {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2128.282054] env[62510]: WARNING nova.compute.manager [req-052341d3-5ee3-49aa-8384-3bfe228d14a3 req-b5aa22a1-bc94-4f14-8d13-cdecb2474daa service nova] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Received unexpected event network-vif-plugged-47759f10-ede2-4020-b8a8-36effea384c5 for instance with vm_state shelved_offloaded and task_state spawning. 
[ 2128.360308] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "refresh_cache-9f3f72ba-60c9-48fb-917f-197e6fc8faef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2128.360308] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquired lock "refresh_cache-9f3f72ba-60c9-48fb-917f-197e6fc8faef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2128.360564] env[62510]: DEBUG nova.network.neutron [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2128.372678] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "70ed3d3b-d436-49f3-8d17-ced3ada4d1e3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2128.372678] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "70ed3d3b-d436-49f3-8d17-ced3ada4d1e3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2128.443423] env[62510]: INFO nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3 has allocations against this compute host but is not found in the database. 
[ 2128.443845] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2128.443845] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2128.487811] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6e38bae-ca0c-41ef-ba1e-7090626be1d3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.495224] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfffa082-f906-4496-89d8-a79bc1f0f818 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.524446] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02bf9ba3-f3ee-43ad-9083-b4dc2646a0df {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.531781] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ab5293e-96f2-4924-8a06-277d9af219d2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.544677] env[62510]: DEBUG nova.compute.provider_tree [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2128.674724] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3c6c9644-4d97-42b4-a7ca-bc2ba8e11e09 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquiring lock "ef20eba1-cb3a-4e0f-bbdb-54949e409546" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2128.674960] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3c6c9644-4d97-42b4-a7ca-bc2ba8e11e09 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "ef20eba1-cb3a-4e0f-bbdb-54949e409546" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2128.875282] env[62510]: DEBUG nova.compute.manager [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Starting instance... 
{{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2129.047611] env[62510]: DEBUG nova.scheduler.client.report [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2129.178709] env[62510]: DEBUG nova.compute.utils [None req-3c6c9644-4d97-42b4-a7ca-bc2ba8e11e09 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2129.279302] env[62510]: DEBUG nova.network.neutron [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Updating instance_info_cache with network_info: [{"id": "47759f10-ede2-4020-b8a8-36effea384c5", "address": "fa:16:3e:e0:5c:8d", "network": {"id": "4c55d05c-607e-4972-898f-4aacefeddfdb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1391357384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bae4f0adee8c4c28add1849316448538", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dced2f3d-7fd3-4a42-836d-9f02dab4c949", "external-id": "nsx-vlan-transportzone-117", "segmentation_id": 117, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47759f10-ed", "ovs_interfaceid": "47759f10-ede2-4020-b8a8-36effea384c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2129.397880] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2129.552853] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62510) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2129.553056] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] 
Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.634s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2129.553271] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.156s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2129.554883] env[62510]: INFO nova.compute.claims [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2129.557727] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2129.557876] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Cleaning up deleted instances with incomplete migration {{(pid=62510) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11647}} [ 2129.681080] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3c6c9644-4d97-42b4-a7ca-bc2ba8e11e09 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "ef20eba1-cb3a-4e0f-bbdb-54949e409546" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2129.782375] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Releasing lock "refresh_cache-9f3f72ba-60c9-48fb-917f-197e6fc8faef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2129.809235] env[62510]: DEBUG nova.virt.hardware [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='c82f455a647c3458fc3114103cce12cc',container_format='bare',created_at=2024-12-11T19:46:33Z,direct_url=,disk_format='vmdk',id=aaa462ff-6524-46df-a138-b1e43c03f689,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-924274453-shelved',owner='bae4f0adee8c4c28add1849316448538',properties=ImageMetaProps,protected=,size=31668736,status='active',tags=,updated_at=2024-12-11T19:46:46Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2129.809511] env[62510]: DEBUG nova.virt.hardware [None 
req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2129.809669] env[62510]: DEBUG nova.virt.hardware [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2129.809852] env[62510]: DEBUG nova.virt.hardware [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2129.809998] env[62510]: DEBUG nova.virt.hardware [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2129.810176] env[62510]: DEBUG nova.virt.hardware [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2129.810444] env[62510]: DEBUG nova.virt.hardware [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2129.810610] env[62510]: DEBUG nova.virt.hardware [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2129.810779] env[62510]: DEBUG nova.virt.hardware [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2129.810939] env[62510]: DEBUG nova.virt.hardware [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2129.811122] env[62510]: DEBUG nova.virt.hardware [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2129.812017] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b233e8f-2f9a-4a3e-b99b-3ba6cace1bae {{(pid=62510) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.820031] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67884894-9fe4-4d12-82bd-d916306d5778 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.833231] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e0:5c:8d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dced2f3d-7fd3-4a42-836d-9f02dab4c949', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '47759f10-ede2-4020-b8a8-36effea384c5', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2129.840434] env[62510]: DEBUG oslo.service.loopingcall [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2129.840686] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2129.840884] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c27cef88-0954-495f-b094-571402ea1f2d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.860854] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2129.860854] env[62510]: value = "task-1769879" [ 2129.860854] env[62510]: _type = "Task" [ 2129.860854] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2129.867958] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769879, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2130.306024] env[62510]: DEBUG nova.compute.manager [req-d44b8f3f-808b-41b6-85fe-7ee4321aa314 req-5e8cd9fc-1d69-42f3-92d4-836866ee9890 service nova] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Received event network-changed-47759f10-ede2-4020-b8a8-36effea384c5 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2130.306272] env[62510]: DEBUG nova.compute.manager [req-d44b8f3f-808b-41b6-85fe-7ee4321aa314 req-5e8cd9fc-1d69-42f3-92d4-836866ee9890 service nova] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Refreshing instance network info cache due to event network-changed-47759f10-ede2-4020-b8a8-36effea384c5. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 2130.306417] env[62510]: DEBUG oslo_concurrency.lockutils [req-d44b8f3f-808b-41b6-85fe-7ee4321aa314 req-5e8cd9fc-1d69-42f3-92d4-836866ee9890 service nova] Acquiring lock "refresh_cache-9f3f72ba-60c9-48fb-917f-197e6fc8faef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2130.306561] env[62510]: DEBUG oslo_concurrency.lockutils [req-d44b8f3f-808b-41b6-85fe-7ee4321aa314 req-5e8cd9fc-1d69-42f3-92d4-836866ee9890 service nova] Acquired lock "refresh_cache-9f3f72ba-60c9-48fb-917f-197e6fc8faef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2130.306722] env[62510]: DEBUG nova.network.neutron [req-d44b8f3f-808b-41b6-85fe-7ee4321aa314 req-5e8cd9fc-1d69-42f3-92d4-836866ee9890 service nova] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Refreshing network info cache for port 47759f10-ede2-4020-b8a8-36effea384c5 {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2130.371310] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769879, 'name': CreateVM_Task, 'duration_secs': 0.30174} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2130.371493] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2130.372136] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/aaa462ff-6524-46df-a138-b1e43c03f689" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2130.372301] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquired lock "[datastore1] devstack-image-cache_base/aaa462ff-6524-46df-a138-b1e43c03f689" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2130.372737] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/aaa462ff-6524-46df-a138-b1e43c03f689" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2130.372993] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1428d7b-2643-4f53-b166-16535e4b40e7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.378104] env[62510]: DEBUG oslo_vmware.api [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 2130.378104] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52169c92-642a-c4a0-da0c-728745cf9584" [ 2130.378104] env[62510]: _type = "Task" [ 2130.378104] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2130.387627] env[62510]: DEBUG oslo_vmware.api [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52169c92-642a-c4a0-da0c-728745cf9584, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2130.608646] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fce6183f-fcff-4858-b05e-e08b090e4ba9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.616286] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6eb0088-d1a8-4eb1-befd-e990ffe8b4dc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.646550] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30bb0926-2ee6-41c5-a5e4-b972e131ecd1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.653747] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39938661-6935-4345-9eb2-d9433a8c9ce8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.666336] env[62510]: DEBUG nova.compute.provider_tree [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2130.747204] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3c6c9644-4d97-42b4-a7ca-bc2ba8e11e09 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquiring lock "ef20eba1-cb3a-4e0f-bbdb-54949e409546" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2130.747433] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3c6c9644-4d97-42b4-a7ca-bc2ba8e11e09 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "ef20eba1-cb3a-4e0f-bbdb-54949e409546" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2130.747658] env[62510]: INFO nova.compute.manager [None req-3c6c9644-4d97-42b4-a7ca-bc2ba8e11e09 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Attaching volume e810f407-c27d-4fd3-bd74-8361f86592df to /dev/sdb [ 2130.778267] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d99f68e9-e26d-46d0-b47b-aedb17406617 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.785503] env[62510]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20bf3d37-f1a0-4327-9508-7fe2f4f7c868 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.798397] env[62510]: DEBUG nova.virt.block_device [None req-3c6c9644-4d97-42b4-a7ca-bc2ba8e11e09 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Updating existing volume attachment record: 9220a862-dcee-458a-bf6f-a92753a6f45e {{(pid=62510) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2130.889854] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Releasing lock "[datastore1] devstack-image-cache_base/aaa462ff-6524-46df-a138-b1e43c03f689" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2130.890199] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Processing image aaa462ff-6524-46df-a138-b1e43c03f689 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2130.890529] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/aaa462ff-6524-46df-a138-b1e43c03f689/aaa462ff-6524-46df-a138-b1e43c03f689.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2130.890737] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquired lock "[datastore1] devstack-image-cache_base/aaa462ff-6524-46df-a138-b1e43c03f689/aaa462ff-6524-46df-a138-b1e43c03f689.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2130.891029] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2130.891346] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3615c53c-ea90-4be1-a572-ba5cc34a9776 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.910037] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2130.910265] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Folder [datastore1] 
devstack-image-cache_base created. {{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2130.910965] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5eeaebd9-cd8b-4ddf-bd74-22caa2fe59a2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.915873] env[62510]: DEBUG oslo_vmware.api [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 2130.915873] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52616513-fbf9-8d65-da76-6b9eaff14295" [ 2130.915873] env[62510]: _type = "Task" [ 2130.915873] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2130.923688] env[62510]: DEBUG oslo_vmware.api [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52616513-fbf9-8d65-da76-6b9eaff14295, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2131.062290] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2131.062517] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Starting heal instance info cache {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 2131.082233] env[62510]: DEBUG nova.network.neutron [req-d44b8f3f-808b-41b6-85fe-7ee4321aa314 req-5e8cd9fc-1d69-42f3-92d4-836866ee9890 service nova] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Updated VIF entry in instance network info cache for port 47759f10-ede2-4020-b8a8-36effea384c5. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2131.082598] env[62510]: DEBUG nova.network.neutron [req-d44b8f3f-808b-41b6-85fe-7ee4321aa314 req-5e8cd9fc-1d69-42f3-92d4-836866ee9890 service nova] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Updating instance_info_cache with network_info: [{"id": "47759f10-ede2-4020-b8a8-36effea384c5", "address": "fa:16:3e:e0:5c:8d", "network": {"id": "4c55d05c-607e-4972-898f-4aacefeddfdb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1391357384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bae4f0adee8c4c28add1849316448538", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dced2f3d-7fd3-4a42-836d-9f02dab4c949", "external-id": "nsx-vlan-transportzone-117", "segmentation_id": 117, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47759f10-ed", "ovs_interfaceid": "47759f10-ede2-4020-b8a8-36effea384c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2131.169137] env[62510]: DEBUG nova.scheduler.client.report [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2131.426359] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Preparing fetch location {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2131.426778] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Fetch image to [datastore1] OSTACK_IMG_e4f020f7-cd23-4ce0-a459-30b4a19eba18/OSTACK_IMG_e4f020f7-cd23-4ce0-a459-30b4a19eba18.vmdk {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2131.426778] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Downloading 
stream optimized image aaa462ff-6524-46df-a138-b1e43c03f689 to [datastore1] OSTACK_IMG_e4f020f7-cd23-4ce0-a459-30b4a19eba18/OSTACK_IMG_e4f020f7-cd23-4ce0-a459-30b4a19eba18.vmdk on the data store datastore1 as vApp {{(pid=62510) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 2131.427029] env[62510]: DEBUG nova.virt.vmwareapi.images [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Downloading image file data aaa462ff-6524-46df-a138-b1e43c03f689 to the ESX as VM named 'OSTACK_IMG_e4f020f7-cd23-4ce0-a459-30b4a19eba18' {{(pid=62510) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 2131.491623] env[62510]: DEBUG oslo_vmware.rw_handles [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 2131.491623] env[62510]: value = "resgroup-9" [ 2131.491623] env[62510]: _type = "ResourcePool" [ 2131.491623] env[62510]: }. {{(pid=62510) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 2131.491897] env[62510]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-9cc8a7af-42ef-4792-a749-404551c43105 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.512919] env[62510]: DEBUG oslo_vmware.rw_handles [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lease: (returnval){ [ 2131.512919] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5207dbb7-e98e-6722-cff2-62f7be2f8152" [ 2131.512919] env[62510]: _type = "HttpNfcLease" [ 2131.512919] env[62510]: } obtained for vApp import into resource pool (val){ [ 2131.512919] env[62510]: value = "resgroup-9" [ 2131.512919] env[62510]: _type = "ResourcePool" [ 2131.512919] env[62510]: }. {{(pid=62510) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 2131.513277] env[62510]: DEBUG oslo_vmware.api [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the lease: (returnval){ [ 2131.513277] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5207dbb7-e98e-6722-cff2-62f7be2f8152" [ 2131.513277] env[62510]: _type = "HttpNfcLease" [ 2131.513277] env[62510]: } to be ready. {{(pid=62510) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2131.518965] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2131.518965] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5207dbb7-e98e-6722-cff2-62f7be2f8152" [ 2131.518965] env[62510]: _type = "HttpNfcLease" [ 2131.518965] env[62510]: } is initializing. 
{{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2131.585094] env[62510]: DEBUG oslo_concurrency.lockutils [req-d44b8f3f-808b-41b6-85fe-7ee4321aa314 req-5e8cd9fc-1d69-42f3-92d4-836866ee9890 service nova] Releasing lock "refresh_cache-9f3f72ba-60c9-48fb-917f-197e6fc8faef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2131.673885] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.120s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2131.674506] env[62510]: DEBUG nova.compute.manager [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Start building networks asynchronously for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2132.021883] env[62510]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2132.021883] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5207dbb7-e98e-6722-cff2-62f7be2f8152" [ 2132.021883] env[62510]: _type = "HttpNfcLease" [ 2132.021883] env[62510]: } is ready. {{(pid=62510) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2132.022250] env[62510]: DEBUG oslo_vmware.rw_handles [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2132.022250] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]5207dbb7-e98e-6722-cff2-62f7be2f8152" [ 2132.022250] env[62510]: _type = "HttpNfcLease" [ 2132.022250] env[62510]: }. {{(pid=62510) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 2132.022950] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db4a07e1-6b72-44b7-8f67-2edc073ca2ed {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.030114] env[62510]: DEBUG oslo_vmware.rw_handles [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ca3c47-d944-6c76-ca35-a5e73c4b4c13/disk-0.vmdk from lease info. {{(pid=62510) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2132.030432] env[62510]: DEBUG oslo_vmware.rw_handles [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Creating HTTP connection to write to file with size = 31668736 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ca3c47-d944-6c76-ca35-a5e73c4b4c13/disk-0.vmdk. 
{{(pid=62510) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2132.086654] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "refresh_cache-9f3f72ba-60c9-48fb-917f-197e6fc8faef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2132.086813] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquired lock "refresh_cache-9f3f72ba-60c9-48fb-917f-197e6fc8faef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2132.087065] env[62510]: DEBUG nova.network.neutron [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Forcefully refreshing network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2132.093592] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-1e6390fe-39d7-47ea-8f4a-3576f063e2bf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.179397] env[62510]: DEBUG nova.compute.utils [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2132.180752] env[62510]: DEBUG nova.compute.manager [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Allocating IP information in the background. 
{{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2132.180930] env[62510]: DEBUG nova.network.neutron [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2132.218813] env[62510]: DEBUG nova.policy [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e483d7dc32804985bc9af5128670131b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5b13a257970e4a9a9f9cfecaaf37d9da', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 2132.494257] env[62510]: DEBUG nova.network.neutron [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Successfully created port: 3ba21c65-0774-4217-9f6c-e8399b1f82db {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2132.684793] env[62510]: DEBUG nova.compute.manager [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Start building block device mappings for instance. {{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2133.227582] env[62510]: DEBUG oslo_vmware.rw_handles [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Completed reading data from the image iterator. {{(pid=62510) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2133.228135] env[62510]: DEBUG oslo_vmware.rw_handles [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ca3c47-d944-6c76-ca35-a5e73c4b4c13/disk-0.vmdk. {{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2133.228931] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b90bdbbc-ea94-494a-8ac6-75ba937a5001 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.239490] env[62510]: DEBUG oslo_vmware.rw_handles [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ca3c47-d944-6c76-ca35-a5e73c4b4c13/disk-0.vmdk is in state: ready. 
{{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2133.239656] env[62510]: DEBUG oslo_vmware.rw_handles [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ca3c47-d944-6c76-ca35-a5e73c4b4c13/disk-0.vmdk. {{(pid=62510) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 2133.240110] env[62510]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-a6c202b5-f524-454a-bbb9-27037d8ae4ed {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.426164] env[62510]: DEBUG oslo_vmware.rw_handles [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52ca3c47-d944-6c76-ca35-a5e73c4b4c13/disk-0.vmdk. {{(pid=62510) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 2133.426363] env[62510]: INFO nova.virt.vmwareapi.images [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Downloaded image file data aaa462ff-6524-46df-a138-b1e43c03f689 [ 2133.427297] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-035e2c9d-7b9c-455f-9620-764f3d7ed0b0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.443883] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-18cea69b-ddac-4f00-82c9-33d3fbaa2358 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.499015] env[62510]: INFO nova.virt.vmwareapi.images [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] The imported VM was unregistered [ 2133.501356] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Caching image {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2133.501606] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Creating directory with path [datastore1] devstack-image-cache_base/aaa462ff-6524-46df-a138-b1e43c03f689 {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2133.501886] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2d36a00a-a977-4280-b00d-c5a2935648a6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.511979] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 
tempest-ServerActionsTestOtherB-1185268283-project-member] Created directory with path [datastore1] devstack-image-cache_base/aaa462ff-6524-46df-a138-b1e43c03f689 {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2133.512175] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_e4f020f7-cd23-4ce0-a459-30b4a19eba18/OSTACK_IMG_e4f020f7-cd23-4ce0-a459-30b4a19eba18.vmdk to [datastore1] devstack-image-cache_base/aaa462ff-6524-46df-a138-b1e43c03f689/aaa462ff-6524-46df-a138-b1e43c03f689.vmdk. {{(pid=62510) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 2133.512429] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-fb90aeec-1ea0-4008-993a-2dacbdfba484 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.517850] env[62510]: DEBUG nova.network.neutron [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Updating instance_info_cache with network_info: [{"id": "47759f10-ede2-4020-b8a8-36effea384c5", "address": "fa:16:3e:e0:5c:8d", "network": {"id": "4c55d05c-607e-4972-898f-4aacefeddfdb", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1391357384-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bae4f0adee8c4c28add1849316448538", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dced2f3d-7fd3-4a42-836d-9f02dab4c949", "external-id": "nsx-vlan-transportzone-117", "segmentation_id": 117, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47759f10-ed", "ovs_interfaceid": "47759f10-ede2-4020-b8a8-36effea384c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2133.519966] env[62510]: DEBUG oslo_vmware.api [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 2133.519966] env[62510]: value = "task-1769884" [ 2133.519966] env[62510]: _type = "Task" [ 2133.519966] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2133.527755] env[62510]: DEBUG oslo_vmware.api [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769884, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2133.697649] env[62510]: DEBUG nova.compute.manager [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Start spawning the instance on the hypervisor. {{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2133.724640] env[62510]: DEBUG nova.virt.hardware [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T19:20:20Z,direct_url=,disk_format='vmdk',id=645af513-c243-4722-b631-714f21477ae6,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='06084a351fb546e09252574b82e81812',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T19:20:21Z,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2133.724890] env[62510]: DEBUG nova.virt.hardware [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2133.725090] env[62510]: DEBUG nova.virt.hardware [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2133.725295] env[62510]: DEBUG nova.virt.hardware [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2133.725446] env[62510]: DEBUG nova.virt.hardware [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2133.725594] env[62510]: DEBUG nova.virt.hardware [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2133.725808] env[62510]: DEBUG nova.virt.hardware [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 
2133.725965] env[62510]: DEBUG nova.virt.hardware [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2133.726152] env[62510]: DEBUG nova.virt.hardware [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Got 1 possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2133.726336] env[62510]: DEBUG nova.virt.hardware [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2133.726533] env[62510]: DEBUG nova.virt.hardware [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2133.727437] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25eacb04-96e8-4dbf-8ddd-0960a50c7dc6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.737932] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-151a6ff9-908f-47fe-9158-25a299fa81a9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.918733] env[62510]: DEBUG nova.compute.manager [req-b074b312-0bdd-4817-b9e1-a2097b3edd50 req-d2458878-3ba1-491c-be0e-a9c579d4ea7b service nova] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Received event network-vif-plugged-3ba21c65-0774-4217-9f6c-e8399b1f82db {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2133.918833] env[62510]: DEBUG oslo_concurrency.lockutils [req-b074b312-0bdd-4817-b9e1-a2097b3edd50 req-d2458878-3ba1-491c-be0e-a9c579d4ea7b service nova] Acquiring lock "70ed3d3b-d436-49f3-8d17-ced3ada4d1e3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2133.919203] env[62510]: DEBUG oslo_concurrency.lockutils [req-b074b312-0bdd-4817-b9e1-a2097b3edd50 req-d2458878-3ba1-491c-be0e-a9c579d4ea7b service nova] Lock "70ed3d3b-d436-49f3-8d17-ced3ada4d1e3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2133.919467] env[62510]: DEBUG oslo_concurrency.lockutils [req-b074b312-0bdd-4817-b9e1-a2097b3edd50 req-d2458878-3ba1-491c-be0e-a9c579d4ea7b service nova] Lock "70ed3d3b-d436-49f3-8d17-ced3ada4d1e3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2133.919661] env[62510]: DEBUG nova.compute.manager [req-b074b312-0bdd-4817-b9e1-a2097b3edd50 
req-d2458878-3ba1-491c-be0e-a9c579d4ea7b service nova] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] No waiting events found dispatching network-vif-plugged-3ba21c65-0774-4217-9f6c-e8399b1f82db {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2133.919835] env[62510]: WARNING nova.compute.manager [req-b074b312-0bdd-4817-b9e1-a2097b3edd50 req-d2458878-3ba1-491c-be0e-a9c579d4ea7b service nova] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Received unexpected event network-vif-plugged-3ba21c65-0774-4217-9f6c-e8399b1f82db for instance with vm_state building and task_state spawning. [ 2134.010687] env[62510]: DEBUG nova.network.neutron [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Successfully updated port: 3ba21c65-0774-4217-9f6c-e8399b1f82db {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2134.021871] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Releasing lock "refresh_cache-9f3f72ba-60c9-48fb-917f-197e6fc8faef" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2134.022063] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Updated the network info_cache for instance {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10380}} [ 2134.023134] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2134.026899] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2134.034886] env[62510]: DEBUG oslo_vmware.api [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769884, 'name': MoveVirtualDisk_Task} progress is 24%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2134.543093] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "refresh_cache-70ed3d3b-d436-49f3-8d17-ced3ada4d1e3" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2134.543093] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquired lock "refresh_cache-70ed3d3b-d436-49f3-8d17-ced3ada4d1e3" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2134.543093] env[62510]: DEBUG nova.network.neutron [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2134.543093] env[62510]: DEBUG oslo_vmware.api [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769884, 'name': MoveVirtualDisk_Task} progress is 46%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2135.033582] env[62510]: DEBUG oslo_vmware.api [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769884, 'name': MoveVirtualDisk_Task} progress is 69%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2135.050542] env[62510]: DEBUG nova.network.neutron [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Instance cache missing network info. 
{{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2135.224208] env[62510]: DEBUG nova.network.neutron [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Updating instance_info_cache with network_info: [{"id": "3ba21c65-0774-4217-9f6c-e8399b1f82db", "address": "fa:16:3e:31:77:2e", "network": {"id": "e49618de-aacc-4b42-8a2e-7e2dc945a3b1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-883053645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b13a257970e4a9a9f9cfecaaf37d9da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ba21c65-07", "ovs_interfaceid": "3ba21c65-0774-4217-9f6c-e8399b1f82db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2135.343774] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c6c9644-4d97-42b4-a7ca-bc2ba8e11e09 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Volume attach. 
Driver type: vmdk {{(pid=62510) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2135.344041] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c6c9644-4d97-42b4-a7ca-bc2ba8e11e09 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367522', 'volume_id': 'e810f407-c27d-4fd3-bd74-8361f86592df', 'name': 'volume-e810f407-c27d-4fd3-bd74-8361f86592df', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ef20eba1-cb3a-4e0f-bbdb-54949e409546', 'attached_at': '', 'detached_at': '', 'volume_id': 'e810f407-c27d-4fd3-bd74-8361f86592df', 'serial': 'e810f407-c27d-4fd3-bd74-8361f86592df'} {{(pid=62510) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2135.345091] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-875287ad-c66d-4470-9e37-08de5d782d40 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.364802] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eec16576-49bf-441f-b387-7cd733d52b96 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.394987] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c6c9644-4d97-42b4-a7ca-bc2ba8e11e09 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Reconfiguring VM instance instance-0000007d to attach disk [datastore1] volume-e810f407-c27d-4fd3-bd74-8361f86592df/volume-e810f407-c27d-4fd3-bd74-8361f86592df.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2135.395336] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a2303e2d-7890-4713-a27d-5c697cb7861c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.416115] env[62510]: DEBUG oslo_vmware.api [None req-3c6c9644-4d97-42b4-a7ca-bc2ba8e11e09 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 2135.416115] env[62510]: value = "task-1769885" [ 2135.416115] env[62510]: _type = "Task" [ 2135.416115] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2135.427336] env[62510]: DEBUG oslo_vmware.api [None req-3c6c9644-4d97-42b4-a7ca-bc2ba8e11e09 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769885, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2135.535899] env[62510]: DEBUG oslo_vmware.api [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769884, 'name': MoveVirtualDisk_Task} progress is 91%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2135.728076] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Releasing lock "refresh_cache-70ed3d3b-d436-49f3-8d17-ced3ada4d1e3" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2135.728076] env[62510]: DEBUG nova.compute.manager [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Instance network_info: |[{"id": "3ba21c65-0774-4217-9f6c-e8399b1f82db", "address": "fa:16:3e:31:77:2e", "network": {"id": "e49618de-aacc-4b42-8a2e-7e2dc945a3b1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-883053645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b13a257970e4a9a9f9cfecaaf37d9da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ba21c65-07", "ovs_interfaceid": "3ba21c65-0774-4217-9f6c-e8399b1f82db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2135.728513] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:31:77:2e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '73b1ea51-8078-4169-921e-d5a224120ab4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3ba21c65-0774-4217-9f6c-e8399b1f82db', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2135.736104] env[62510]: DEBUG oslo.service.loopingcall [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2135.736317] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2135.736540] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-260b772d-9676-410a-a467-75478ca3b5a0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.755756] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2135.755756] env[62510]: value = "task-1769886" [ 2135.755756] env[62510]: _type = "Task" [ 2135.755756] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2135.763169] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769886, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2135.926276] env[62510]: DEBUG oslo_vmware.api [None req-3c6c9644-4d97-42b4-a7ca-bc2ba8e11e09 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769885, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2135.944776] env[62510]: DEBUG nova.compute.manager [req-151d4033-6ebe-45ad-b6ba-d3c0af17700e req-fc586179-1871-40a2-8317-4251e3783257 service nova] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Received event network-changed-3ba21c65-0774-4217-9f6c-e8399b1f82db {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2135.944977] env[62510]: DEBUG nova.compute.manager [req-151d4033-6ebe-45ad-b6ba-d3c0af17700e req-fc586179-1871-40a2-8317-4251e3783257 service nova] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Refreshing instance network info cache due to event network-changed-3ba21c65-0774-4217-9f6c-e8399b1f82db. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 2135.945177] env[62510]: DEBUG oslo_concurrency.lockutils [req-151d4033-6ebe-45ad-b6ba-d3c0af17700e req-fc586179-1871-40a2-8317-4251e3783257 service nova] Acquiring lock "refresh_cache-70ed3d3b-d436-49f3-8d17-ced3ada4d1e3" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2135.945328] env[62510]: DEBUG oslo_concurrency.lockutils [req-151d4033-6ebe-45ad-b6ba-d3c0af17700e req-fc586179-1871-40a2-8317-4251e3783257 service nova] Acquired lock "refresh_cache-70ed3d3b-d436-49f3-8d17-ced3ada4d1e3" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2135.945532] env[62510]: DEBUG nova.network.neutron [req-151d4033-6ebe-45ad-b6ba-d3c0af17700e req-fc586179-1871-40a2-8317-4251e3783257 service nova] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Refreshing network info cache for port 3ba21c65-0774-4217-9f6c-e8399b1f82db {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2136.034973] env[62510]: DEBUG oslo_vmware.api [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769884, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.212073} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2136.035287] env[62510]: INFO nova.virt.vmwareapi.ds_util [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_e4f020f7-cd23-4ce0-a459-30b4a19eba18/OSTACK_IMG_e4f020f7-cd23-4ce0-a459-30b4a19eba18.vmdk to [datastore1] devstack-image-cache_base/aaa462ff-6524-46df-a138-b1e43c03f689/aaa462ff-6524-46df-a138-b1e43c03f689.vmdk. [ 2136.035481] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Cleaning up location [datastore1] OSTACK_IMG_e4f020f7-cd23-4ce0-a459-30b4a19eba18 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 2136.035647] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_e4f020f7-cd23-4ce0-a459-30b4a19eba18 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2136.035901] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-01e4eac3-22b8-4842-b956-686cc76b885d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.042653] env[62510]: DEBUG oslo_vmware.api [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 2136.042653] env[62510]: value = "task-1769887" [ 2136.042653] env[62510]: _type = "Task" [ 2136.042653] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2136.050022] env[62510]: DEBUG oslo_vmware.api [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769887, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2136.265613] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769886, 'name': CreateVM_Task, 'duration_secs': 0.336789} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2136.265822] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2136.266403] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2136.266578] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2136.266900] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2136.267162] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4fce9db-3ef7-4e0d-9237-3a59274e985a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.271439] env[62510]: DEBUG oslo_vmware.api [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2136.271439] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]524848e2-6302-4204-ca0c-6f0a9aced4eb" [ 2136.271439] env[62510]: _type = "Task" [ 2136.271439] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2136.278561] env[62510]: DEBUG oslo_vmware.api [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]524848e2-6302-4204-ca0c-6f0a9aced4eb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2136.426324] env[62510]: DEBUG oslo_vmware.api [None req-3c6c9644-4d97-42b4-a7ca-bc2ba8e11e09 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769885, 'name': ReconfigVM_Task, 'duration_secs': 0.571003} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2136.426601] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c6c9644-4d97-42b4-a7ca-bc2ba8e11e09 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Reconfigured VM instance instance-0000007d to attach disk [datastore1] volume-e810f407-c27d-4fd3-bd74-8361f86592df/volume-e810f407-c27d-4fd3-bd74-8361f86592df.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2136.431270] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9a463d50-9495-4e46-903f-3aabd8d8ad4e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.446556] env[62510]: DEBUG oslo_vmware.api [None req-3c6c9644-4d97-42b4-a7ca-bc2ba8e11e09 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 2136.446556] env[62510]: value = "task-1769888" [ 2136.446556] env[62510]: _type = "Task" [ 2136.446556] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2136.455958] env[62510]: DEBUG oslo_vmware.api [None req-3c6c9644-4d97-42b4-a7ca-bc2ba8e11e09 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769888, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2136.552142] env[62510]: DEBUG oslo_vmware.api [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769887, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.109284} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2136.552402] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2136.552570] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Releasing lock "[datastore1] devstack-image-cache_base/aaa462ff-6524-46df-a138-b1e43c03f689/aaa462ff-6524-46df-a138-b1e43c03f689.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2136.552805] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/aaa462ff-6524-46df-a138-b1e43c03f689/aaa462ff-6524-46df-a138-b1e43c03f689.vmdk to [datastore1] 9f3f72ba-60c9-48fb-917f-197e6fc8faef/9f3f72ba-60c9-48fb-917f-197e6fc8faef.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2136.553069] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8a14b079-396e-44c9-9359-58035097cec6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.559594] env[62510]: DEBUG oslo_vmware.api [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 2136.559594] env[62510]: value = "task-1769889" [ 2136.559594] env[62510]: _type = "Task" [ 2136.559594] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2136.568054] env[62510]: DEBUG oslo_vmware.api [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769889, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2136.714615] env[62510]: DEBUG nova.network.neutron [req-151d4033-6ebe-45ad-b6ba-d3c0af17700e req-fc586179-1871-40a2-8317-4251e3783257 service nova] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Updated VIF entry in instance network info cache for port 3ba21c65-0774-4217-9f6c-e8399b1f82db. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2136.715035] env[62510]: DEBUG nova.network.neutron [req-151d4033-6ebe-45ad-b6ba-d3c0af17700e req-fc586179-1871-40a2-8317-4251e3783257 service nova] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Updating instance_info_cache with network_info: [{"id": "3ba21c65-0774-4217-9f6c-e8399b1f82db", "address": "fa:16:3e:31:77:2e", "network": {"id": "e49618de-aacc-4b42-8a2e-7e2dc945a3b1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-883053645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b13a257970e4a9a9f9cfecaaf37d9da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ba21c65-07", "ovs_interfaceid": "3ba21c65-0774-4217-9f6c-e8399b1f82db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2136.782247] env[62510]: DEBUG oslo_vmware.api [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]524848e2-6302-4204-ca0c-6f0a9aced4eb, 'name': SearchDatastore_Task, 'duration_secs': 0.013326} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2136.782559] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2136.782738] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Processing image 645af513-c243-4722-b631-714f21477ae6 {{(pid=62510) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2136.782972] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2136.783131] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquired lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2136.783305] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2136.783548] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f96eb576-7603-4a14-95b8-695f373999e1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.791246] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62510) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2136.791410] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62510) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2136.792076] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a81e9469-afa1-4121-98af-ca6c5754a051 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.797614] env[62510]: DEBUG oslo_vmware.api [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2136.797614] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]522a6f31-a36d-1e04-2e0a-22402544e1c1" [ 2136.797614] env[62510]: _type = "Task" [ 2136.797614] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2136.804990] env[62510]: DEBUG oslo_vmware.api [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]522a6f31-a36d-1e04-2e0a-22402544e1c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2136.956032] env[62510]: DEBUG oslo_vmware.api [None req-3c6c9644-4d97-42b4-a7ca-bc2ba8e11e09 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769888, 'name': ReconfigVM_Task, 'duration_secs': 0.251117} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2136.956375] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c6c9644-4d97-42b4-a7ca-bc2ba8e11e09 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367522', 'volume_id': 'e810f407-c27d-4fd3-bd74-8361f86592df', 'name': 'volume-e810f407-c27d-4fd3-bd74-8361f86592df', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ef20eba1-cb3a-4e0f-bbdb-54949e409546', 'attached_at': '', 'detached_at': '', 'volume_id': 'e810f407-c27d-4fd3-bd74-8361f86592df', 'serial': 'e810f407-c27d-4fd3-bd74-8361f86592df'} {{(pid=62510) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2137.070966] env[62510]: DEBUG oslo_vmware.api [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769889, 'name': CopyVirtualDisk_Task} progress is 12%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2137.207382] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2137.207671] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Cleaning up deleted instances {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11609}} [ 2137.218430] env[62510]: DEBUG oslo_concurrency.lockutils [req-151d4033-6ebe-45ad-b6ba-d3c0af17700e req-fc586179-1871-40a2-8317-4251e3783257 service nova] Releasing lock "refresh_cache-70ed3d3b-d436-49f3-8d17-ced3ada4d1e3" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2137.313785] env[62510]: DEBUG oslo_vmware.api [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]522a6f31-a36d-1e04-2e0a-22402544e1c1, 'name': SearchDatastore_Task, 'duration_secs': 0.025315} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2137.314719] env[62510]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-085c4b9c-5261-4260-b510-d7b5649d5dbe {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.322494] env[62510]: DEBUG oslo_vmware.api [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2137.322494] env[62510]: value = "session[528e9567-5f6b-75c7-e952-406003a8e250]52194d07-6e78-f869-65de-ca253ebdf370" [ 2137.322494] env[62510]: _type = "Task" [ 2137.322494] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2137.333982] env[62510]: DEBUG oslo_vmware.api [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52194d07-6e78-f869-65de-ca253ebdf370, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2137.571567] env[62510]: DEBUG oslo_vmware.api [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769889, 'name': CopyVirtualDisk_Task} progress is 35%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2137.723423] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] There are 45 instances to clean {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11618}} [ 2137.723622] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 62417973-075e-4128-8eb5-4c62946856e7] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2137.834906] env[62510]: DEBUG oslo_vmware.api [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': session[528e9567-5f6b-75c7-e952-406003a8e250]52194d07-6e78-f869-65de-ca253ebdf370, 'name': SearchDatastore_Task, 'duration_secs': 0.079574} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2137.835237] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Releasing lock "[datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2137.835497] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3/70ed3d3b-d436-49f3-8d17-ced3ada4d1e3.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2137.835772] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2e0117b7-cea4-4081-a3eb-697b6deca8e3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.844351] env[62510]: DEBUG oslo_vmware.api [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2137.844351] env[62510]: value = "task-1769890" [ 2137.844351] env[62510]: _type = "Task" [ 2137.844351] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2137.854509] env[62510]: DEBUG oslo_vmware.api [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769890, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2137.997497] env[62510]: DEBUG nova.objects.instance [None req-3c6c9644-4d97-42b4-a7ca-bc2ba8e11e09 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lazy-loading 'flavor' on Instance uuid ef20eba1-cb3a-4e0f-bbdb-54949e409546 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2138.072215] env[62510]: DEBUG oslo_vmware.api [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769889, 'name': CopyVirtualDisk_Task} progress is 57%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2138.228079] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 94dd7eee-f799-4fb5-854b-e7d59621b125] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2138.355914] env[62510]: DEBUG oslo_vmware.api [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769890, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2138.502581] env[62510]: DEBUG oslo_concurrency.lockutils [None req-3c6c9644-4d97-42b4-a7ca-bc2ba8e11e09 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "ef20eba1-cb3a-4e0f-bbdb-54949e409546" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.755s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2138.573215] env[62510]: DEBUG oslo_vmware.api [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769889, 'name': CopyVirtualDisk_Task} progress is 80%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2138.730890] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: f9dc3ae0-a004-4baf-a972-e4480774cc3f] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2138.858022] env[62510]: DEBUG oslo_vmware.api [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769890, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2139.072956] env[62510]: DEBUG oslo_vmware.api [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769889, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.432625} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2139.072956] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/aaa462ff-6524-46df-a138-b1e43c03f689/aaa462ff-6524-46df-a138-b1e43c03f689.vmdk to [datastore1] 9f3f72ba-60c9-48fb-917f-197e6fc8faef/9f3f72ba-60c9-48fb-917f-197e6fc8faef.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2139.073668] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65716bed-f9db-4866-8a48-4865db062956 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.096089] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] 9f3f72ba-60c9-48fb-917f-197e6fc8faef/9f3f72ba-60c9-48fb-917f-197e6fc8faef.vmdk or device None with type streamOptimized {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2139.096416] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73cbd6c7-4abe-4cc2-b47b-4c4a779130cf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.117413] env[62510]: DEBUG oslo_vmware.api [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 2139.117413] env[62510]: value = "task-1769891" [ 2139.117413] env[62510]: _type = "Task" [ 2139.117413] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2139.127345] env[62510]: DEBUG oslo_vmware.api [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769891, 'name': ReconfigVM_Task} progress is 6%. 
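The "Reconfiguring VM instance ... to attach disk ... with type streamOptimized" records above correspond to a single ReconfigVM_Task whose config spec adds a virtual disk backed by an existing .vmdk. The sketch below builds that spec as plain dicts; the field names are simplified stand-ins for the SOAP data objects (VirtualDeviceConfigSpec, VirtualDisk), not the real API types, and the controller key / unit number defaults are assumptions for illustration.

    def build_attach_disk_spec(vmdk_path, disk_type,
                               controller_key=1000, unit_number=0):
        # disk_type ("sparse", "streamOptimized", ...) comes from image/volume
        # metadata; here it is only carried along for illustration.
        disk = {
            "key": -100,                   # negative key = new device in a config spec
            "controllerKey": controller_key,
            "unitNumber": unit_number,
            "backing": {"fileName": vmdk_path, "diskMode": "persistent"},
            "diskType": disk_type,
        }
        return {"deviceChange": [{"operation": "add", "device": disk}]}

    spec = build_attach_disk_spec(
        "[datastore1] 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3/"
        "70ed3d3b-d436-49f3-8d17-ced3ada4d1e3.vmdk", "sparse")
    print(spec["deviceChange"][0]["operation"])      # -> add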
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2139.219154] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ab10bb13-2d4d-4bc4-ad15-9c1d07f27210 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquiring lock "ef20eba1-cb3a-4e0f-bbdb-54949e409546" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2139.219411] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ab10bb13-2d4d-4bc4-ad15-9c1d07f27210 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "ef20eba1-cb3a-4e0f-bbdb-54949e409546" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2139.234817] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 31a181cd-b7cd-42c0-960d-e7d28987dc19] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2139.356276] env[62510]: DEBUG oslo_vmware.api [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769890, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.458474} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2139.356581] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/645af513-c243-4722-b631-714f21477ae6/645af513-c243-4722-b631-714f21477ae6.vmdk to [datastore1] 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3/70ed3d3b-d436-49f3-8d17-ced3ada4d1e3.vmdk {{(pid=62510) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2139.356836] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Extending root virtual disk to 1048576 {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2139.357137] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cd47382b-8862-42d8-8ec8-612237d71e23 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.363939] env[62510]: DEBUG oslo_vmware.api [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2139.363939] env[62510]: value = "task-1769892" [ 2139.363939] env[62510]: _type = "Task" [ 2139.363939] env[62510]: } to complete. 
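"Extending root virtual disk to 1048576" above is the step where the copied image disk is grown to the flavor's root size; if the figure is in KB, as Nova's vmops passes it, 1048576 KB corresponds to a 1 GiB root disk. A sketch of the grow-only-if-larger decision; "extend_task" is a hypothetical stand-in for VirtualDiskManager.ExtendVirtualDisk_Task.

    def maybe_extend_root_disk(current_kb, flavor_root_gb, extend_task):
        # Grow the disk only when the flavor asks for more than the image provides.
        requested_kb = flavor_root_gb * 1024 * 1024
        if requested_kb > current_kb:
            extend_task(new_capacity_kb=requested_kb)
            return requested_kb
        return current_kb

    calls = []
    print(maybe_extend_root_disk(524288, 1, lambda **kw: calls.append(kw)))  # -> 1048576
    print(calls)                                                             # one extend issued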
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2139.371256] env[62510]: DEBUG oslo_vmware.api [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769892, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2139.629268] env[62510]: DEBUG oslo_vmware.api [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769891, 'name': ReconfigVM_Task, 'duration_secs': 0.375836} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2139.629493] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Reconfigured VM instance instance-0000007c to attach disk [datastore1] 9f3f72ba-60c9-48fb-917f-197e6fc8faef/9f3f72ba-60c9-48fb-917f-197e6fc8faef.vmdk or device None with type streamOptimized {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2139.630065] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-292ea12c-ccc8-4bde-8773-f44a9dbe39cf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.636133] env[62510]: DEBUG oslo_vmware.api [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 2139.636133] env[62510]: value = "task-1769893" [ 2139.636133] env[62510]: _type = "Task" [ 2139.636133] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2139.643311] env[62510]: DEBUG oslo_vmware.api [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769893, 'name': Rename_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2139.722394] env[62510]: INFO nova.compute.manager [None req-ab10bb13-2d4d-4bc4-ad15-9c1d07f27210 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Detaching volume e810f407-c27d-4fd3-bd74-8361f86592df [ 2139.738338] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: f39e74c3-eb58-4d28-a489-73d2de1e9bef] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2139.754935] env[62510]: INFO nova.virt.block_device [None req-ab10bb13-2d4d-4bc4-ad15-9c1d07f27210 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Attempting to driver detach volume e810f407-c27d-4fd3-bd74-8361f86592df from mountpoint /dev/sdb [ 2139.755187] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab10bb13-2d4d-4bc4-ad15-9c1d07f27210 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Volume detach. Driver type: vmdk {{(pid=62510) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2139.755375] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab10bb13-2d4d-4bc4-ad15-9c1d07f27210 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367522', 'volume_id': 'e810f407-c27d-4fd3-bd74-8361f86592df', 'name': 'volume-e810f407-c27d-4fd3-bd74-8361f86592df', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ef20eba1-cb3a-4e0f-bbdb-54949e409546', 'attached_at': '', 'detached_at': '', 'volume_id': 'e810f407-c27d-4fd3-bd74-8361f86592df', 'serial': 'e810f407-c27d-4fd3-bd74-8361f86592df'} {{(pid=62510) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2139.756269] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77d24a1d-73e5-4a7a-85e8-5b591a9c52d7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.778954] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac1c50b1-5c0f-44c8-a617-ddfde3affec0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.785704] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18eb1967-c423-41a0-b17d-d22ebea1fbc1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.805315] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae4600d4-920a-46b6-a592-f1e1068d5257 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.819501] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab10bb13-2d4d-4bc4-ad15-9c1d07f27210 tempest-AttachVolumeNegativeTest-146397362 
tempest-AttachVolumeNegativeTest-146397362-project-member] The volume has not been displaced from its original location: [datastore1] volume-e810f407-c27d-4fd3-bd74-8361f86592df/volume-e810f407-c27d-4fd3-bd74-8361f86592df.vmdk. No consolidation needed. {{(pid=62510) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2139.824674] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab10bb13-2d4d-4bc4-ad15-9c1d07f27210 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Reconfiguring VM instance instance-0000007d to detach disk 2001 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2139.825197] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-49106724-1d35-4616-9e75-1224b817dc70 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.842361] env[62510]: DEBUG oslo_vmware.api [None req-ab10bb13-2d4d-4bc4-ad15-9c1d07f27210 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 2139.842361] env[62510]: value = "task-1769894" [ 2139.842361] env[62510]: _type = "Task" [ 2139.842361] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2139.851847] env[62510]: DEBUG oslo_vmware.api [None req-ab10bb13-2d4d-4bc4-ad15-9c1d07f27210 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769894, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2139.872949] env[62510]: DEBUG oslo_vmware.api [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769892, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064246} completed successfully. 
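The detach path above first checks whether the volume's backing file was displaced (by relocation/vMotion) from its original datastore location; since it was not, no consolidation is needed and one ReconfigVM_Task removes the virtual disk by device key (2001 here). A sketch of that decision under those assumptions; "reconfig" is a hypothetical callback standing in for the ReconfigVM_Task submission, and the consolidation branch is deliberately left out.

    def detach_volume_disk(current_backing, original_volume_path, device_key, reconfig):
        if current_backing != original_volume_path:
            # Displaced backing: the real driver would consolidate (copy the disk
            # back to the volume's location) before detaching; not covered here.
            raise NotImplementedError("consolidation not covered by this sketch")
        spec = {"deviceChange": [{"operation": "remove",
                                  "device": {"key": device_key}}]}
        reconfig(spec)

    issued = []
    detach_volume_disk("[datastore1] volume-X/volume-X.vmdk",
                       "[datastore1] volume-X/volume-X.vmdk",
                       2001, issued.append)
    print(issued[0]["deviceChange"][0])   # -> {'operation': 'remove', 'device': {'key': 2001}}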
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2139.873285] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Extended root virtual disk {{(pid=62510) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2139.873916] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f0d105f-c683-4f50-a9aa-b9945777d2ee {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.894792] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Reconfiguring VM instance instance-0000007e to attach disk [datastore1] 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3/70ed3d3b-d436-49f3-8d17-ced3ada4d1e3.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2139.894968] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c7ffe27d-3aae-4612-ae58-3256c1797f57 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.916371] env[62510]: DEBUG oslo_vmware.api [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2139.916371] env[62510]: value = "task-1769895" [ 2139.916371] env[62510]: _type = "Task" [ 2139.916371] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2139.923950] env[62510]: DEBUG oslo_vmware.api [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769895, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2140.146161] env[62510]: DEBUG oslo_vmware.api [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769893, 'name': Rename_Task, 'duration_secs': 0.131565} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2140.146399] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2140.146646] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2347cde4-2e06-42cd-ab4c-edfae01e495e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.153597] env[62510]: DEBUG oslo_vmware.api [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 2140.153597] env[62510]: value = "task-1769896" [ 2140.153597] env[62510]: _type = "Task" [ 2140.153597] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2140.161200] env[62510]: DEBUG oslo_vmware.api [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769896, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2140.241962] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: bf62d0ce-c0e6-4a77-ab05-ac912ec5530f] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2140.352480] env[62510]: DEBUG oslo_vmware.api [None req-ab10bb13-2d4d-4bc4-ad15-9c1d07f27210 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769894, 'name': ReconfigVM_Task, 'duration_secs': 0.221531} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2140.352754] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab10bb13-2d4d-4bc4-ad15-9c1d07f27210 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Reconfigured VM instance instance-0000007d to detach disk 2001 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2140.357276] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-46fb4cbc-5816-43ec-bf24-0a1111b144e6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.371990] env[62510]: DEBUG oslo_vmware.api [None req-ab10bb13-2d4d-4bc4-ad15-9c1d07f27210 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 2140.371990] env[62510]: value = "task-1769897" [ 2140.371990] env[62510]: _type = "Task" [ 2140.371990] env[62510]: } to complete. 
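"Powering on the VM" followed by a PowerOnVM_Task above is the last step before the instance is reported as spawned; the task is submitted and then waited on with the same polling loop sketched earlier. A minimal sketch of that step; the callbacks are hypothetical stand-ins, and the already-running short-circuit reflects the fact that vSphere typically rejects a power-on of a running VM with an invalid-power-state fault.

    def power_on(vm, submit_power_on, wait):
        if vm.get("power_state") == "poweredOn":
            return "already on"             # avoid the invalid-power-state fault
        task = submit_power_on()            # stand-in for PowerOnVM_Task
        wait(task)                          # same wait_for_task loop as above
        vm["power_state"] = "poweredOn"
        return "powered on"

    vm = {"power_state": "poweredOff"}
    print(power_on(vm, lambda: "task-id", lambda task: None))
    print(vm)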
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2140.379438] env[62510]: DEBUG oslo_vmware.api [None req-ab10bb13-2d4d-4bc4-ad15-9c1d07f27210 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769897, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2140.426210] env[62510]: DEBUG oslo_vmware.api [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769895, 'name': ReconfigVM_Task, 'duration_secs': 0.294039} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2140.426450] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Reconfigured VM instance instance-0000007e to attach disk [datastore1] 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3/70ed3d3b-d436-49f3-8d17-ced3ada4d1e3.vmdk or device None with type sparse {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2140.427124] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9dd67421-fe70-4ff8-9399-3e4b3be236ac {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.433984] env[62510]: DEBUG oslo_vmware.api [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2140.433984] env[62510]: value = "task-1769898" [ 2140.433984] env[62510]: _type = "Task" [ 2140.433984] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2140.442081] env[62510]: DEBUG oslo_vmware.api [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769898, 'name': Rename_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2140.663623] env[62510]: DEBUG oslo_vmware.api [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769896, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2140.745386] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: abf15987-86cc-4fdc-be9a-efd0448ce9ca] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2140.882383] env[62510]: DEBUG oslo_vmware.api [None req-ab10bb13-2d4d-4bc4-ad15-9c1d07f27210 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769897, 'name': ReconfigVM_Task, 'duration_secs': 0.135996} completed successfully. 
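The "Instance has had 0 of 5 cleanup attempts" lines that recur throughout this section come from a periodic task that retries leftover deletions a bounded number of times before giving up. A sketch of that bookkeeping under the 5-attempt cap the log shows; the instance records here are plain dicts rather than Nova objects, and "cleanup" is a hypothetical callback.

    MAX_ATTEMPTS = 5

    def run_pending_deletes(instances, cleanup):
        for inst in instances:
            if inst["cleanup_attempts"] >= MAX_ATTEMPTS:
                continue                    # give up; left for the operator
            try:
                cleanup(inst)
                inst["cleaned"] = True
            except Exception:
                inst["cleanup_attempts"] += 1   # retried on the next periodic run

    instances = [{"uuid": "62417973", "cleanup_attempts": 0, "cleaned": False}]
    run_pending_deletes(instances, lambda inst: None)
    print(instances[0]["cleaned"])          # -> True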
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2140.882726] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab10bb13-2d4d-4bc4-ad15-9c1d07f27210 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367522', 'volume_id': 'e810f407-c27d-4fd3-bd74-8361f86592df', 'name': 'volume-e810f407-c27d-4fd3-bd74-8361f86592df', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ef20eba1-cb3a-4e0f-bbdb-54949e409546', 'attached_at': '', 'detached_at': '', 'volume_id': 'e810f407-c27d-4fd3-bd74-8361f86592df', 'serial': 'e810f407-c27d-4fd3-bd74-8361f86592df'} {{(pid=62510) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2140.943812] env[62510]: DEBUG oslo_vmware.api [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769898, 'name': Rename_Task, 'duration_secs': 0.144271} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2140.944102] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2140.944363] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-92ecb497-e35a-4342-8af5-9fcc42c91fc8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.951555] env[62510]: DEBUG oslo_vmware.api [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2140.951555] env[62510]: value = "task-1769899" [ 2140.951555] env[62510]: _type = "Task" [ 2140.951555] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2140.962383] env[62510]: DEBUG oslo_vmware.api [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769899, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2141.166768] env[62510]: DEBUG oslo_vmware.api [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769896, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2141.248737] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 14a54dac-d2b8-4618-86c8-ab2d08bae005] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2141.425836] env[62510]: DEBUG nova.objects.instance [None req-ab10bb13-2d4d-4bc4-ad15-9c1d07f27210 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lazy-loading 'flavor' on Instance uuid ef20eba1-cb3a-4e0f-bbdb-54949e409546 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2141.462395] env[62510]: DEBUG oslo_vmware.api [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769899, 'name': PowerOnVM_Task, 'duration_secs': 0.492485} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2141.462684] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2141.462890] env[62510]: INFO nova.compute.manager [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Took 7.77 seconds to spawn the instance on the hypervisor. [ 2141.463150] env[62510]: DEBUG nova.compute.manager [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2141.464009] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2fcacb9-cc59-4c0c-bda5-3da94a23d86b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.664338] env[62510]: DEBUG oslo_vmware.api [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769896, 'name': PowerOnVM_Task, 'duration_secs': 1.191051} completed successfully. 
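After the power-on completes, the "Checking state" records above read the VM's power state back from vCenter so the manager can record it. The sketch below shows the kind of translation involved, from vSphere's runtime power-state strings to Nova's integer power_state constants; the constant values match nova.compute.power_state, but the exact string-to-constant mapping is an assumption made for this sketch.

    NOSTATE, RUNNING, PAUSED, SHUTDOWN, SUSPENDED = 0, 1, 3, 4, 7  # nova.compute.power_state values

    VSPHERE_TO_NOVA = {       # assumed mapping, for illustration only
        "poweredOn": RUNNING,
        "poweredOff": SHUTDOWN,
        "suspended": SUSPENDED,
    }

    def get_power_state(runtime_power_state):
        return VSPHERE_TO_NOVA.get(runtime_power_state, NOSTATE)

    print(get_power_state("poweredOn"))     # -> 1 (RUNNING)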
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2141.664610] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2141.752319] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 7b2bcec4-6df7-4591-ac02-9da04d185756] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2141.758622] env[62510]: DEBUG nova.compute.manager [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2141.759480] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8d96fc1-6c01-4c9a-94ef-3e40c69bfc70 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.981421] env[62510]: INFO nova.compute.manager [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Took 12.60 seconds to build instance. [ 2142.255918] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 22002fc1-647e-4e65-a5f0-c3a34575985f] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2142.275821] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a91eb875-295d-40e8-8a0a-19e45a29bf95 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "9f3f72ba-60c9-48fb-917f-197e6fc8faef" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 19.585s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2142.432875] env[62510]: DEBUG oslo_concurrency.lockutils [None req-ab10bb13-2d4d-4bc4-ad15-9c1d07f27210 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "ef20eba1-cb3a-4e0f-bbdb-54949e409546" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.213s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2142.483249] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8be88753-8bf6-4815-a73b-7b388a649e34 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "70ed3d3b-d436-49f3-8d17-ced3ada4d1e3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.110s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2142.760987] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: fe3b3380-69bb-4563-abf2-9f0db439d31a] Instance has had 0 of 5 cleanup attempts {{(pid=62510) 
_run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2142.959553] env[62510]: DEBUG nova.compute.manager [req-96e29c30-d38b-457a-a193-ad798d2021fc req-8eafc67f-b013-4daf-8a6d-623e08953518 service nova] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Received event network-changed-3ba21c65-0774-4217-9f6c-e8399b1f82db {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2142.960029] env[62510]: DEBUG nova.compute.manager [req-96e29c30-d38b-457a-a193-ad798d2021fc req-8eafc67f-b013-4daf-8a6d-623e08953518 service nova] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Refreshing instance network info cache due to event network-changed-3ba21c65-0774-4217-9f6c-e8399b1f82db. {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 2142.960029] env[62510]: DEBUG oslo_concurrency.lockutils [req-96e29c30-d38b-457a-a193-ad798d2021fc req-8eafc67f-b013-4daf-8a6d-623e08953518 service nova] Acquiring lock "refresh_cache-70ed3d3b-d436-49f3-8d17-ced3ada4d1e3" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2142.960119] env[62510]: DEBUG oslo_concurrency.lockutils [req-96e29c30-d38b-457a-a193-ad798d2021fc req-8eafc67f-b013-4daf-8a6d-623e08953518 service nova] Acquired lock "refresh_cache-70ed3d3b-d436-49f3-8d17-ced3ada4d1e3" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2142.960259] env[62510]: DEBUG nova.network.neutron [req-96e29c30-d38b-457a-a193-ad798d2021fc req-8eafc67f-b013-4daf-8a6d-623e08953518 service nova] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Refreshing network info cache for port 3ba21c65-0774-4217-9f6c-e8399b1f82db {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2143.112862] env[62510]: DEBUG oslo_concurrency.lockutils [None req-61e88f08-f1fb-4bd9-968d-67823e321de3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "9f3f72ba-60c9-48fb-917f-197e6fc8faef" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2143.113139] env[62510]: DEBUG oslo_concurrency.lockutils [None req-61e88f08-f1fb-4bd9-968d-67823e321de3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "9f3f72ba-60c9-48fb-917f-197e6fc8faef" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2143.113330] env[62510]: DEBUG oslo_concurrency.lockutils [None req-61e88f08-f1fb-4bd9-968d-67823e321de3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "9f3f72ba-60c9-48fb-917f-197e6fc8faef-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2143.113515] env[62510]: DEBUG oslo_concurrency.lockutils [None req-61e88f08-f1fb-4bd9-968d-67823e321de3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "9f3f72ba-60c9-48fb-917f-197e6fc8faef-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2143.113688] env[62510]: DEBUG oslo_concurrency.lockutils [None req-61e88f08-f1fb-4bd9-968d-67823e321de3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "9f3f72ba-60c9-48fb-917f-197e6fc8faef-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2143.115927] env[62510]: INFO nova.compute.manager [None req-61e88f08-f1fb-4bd9-968d-67823e321de3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Terminating instance [ 2143.264132] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: a5a9c086-6ae2-4644-acfa-7c147593b8d2] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2143.483174] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4070daa8-b24b-4c9a-9b21-0deeae66c980 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquiring lock "ef20eba1-cb3a-4e0f-bbdb-54949e409546" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2143.483442] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4070daa8-b24b-4c9a-9b21-0deeae66c980 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "ef20eba1-cb3a-4e0f-bbdb-54949e409546" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2143.483653] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4070daa8-b24b-4c9a-9b21-0deeae66c980 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquiring lock "ef20eba1-cb3a-4e0f-bbdb-54949e409546-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2143.483836] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4070daa8-b24b-4c9a-9b21-0deeae66c980 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "ef20eba1-cb3a-4e0f-bbdb-54949e409546-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2143.484019] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4070daa8-b24b-4c9a-9b21-0deeae66c980 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "ef20eba1-cb3a-4e0f-bbdb-54949e409546-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2143.486538] env[62510]: INFO nova.compute.manager [None req-4070daa8-b24b-4c9a-9b21-0deeae66c980 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Terminating instance [ 2143.619677] env[62510]: DEBUG nova.compute.manager [None req-61e88f08-f1fb-4bd9-968d-67823e321de3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2143.619900] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-61e88f08-f1fb-4bd9-968d-67823e321de3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2143.620941] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b769dc8-9cd4-4698-b185-d15bc85789cf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.629265] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-61e88f08-f1fb-4bd9-968d-67823e321de3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2143.629513] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-40f89f9e-a261-4df0-b0bc-ef02c195dacd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.636411] env[62510]: DEBUG oslo_vmware.api [None req-61e88f08-f1fb-4bd9-968d-67823e321de3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 2143.636411] env[62510]: value = "task-1769900" [ 2143.636411] env[62510]: _type = "Task" [ 2143.636411] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2143.646833] env[62510]: DEBUG oslo_vmware.api [None req-61e88f08-f1fb-4bd9-968d-67823e321de3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769900, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2143.681646] env[62510]: DEBUG nova.network.neutron [req-96e29c30-d38b-457a-a193-ad798d2021fc req-8eafc67f-b013-4daf-8a6d-623e08953518 service nova] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Updated VIF entry in instance network info cache for port 3ba21c65-0774-4217-9f6c-e8399b1f82db. 
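The "network-changed" event handling above serializes on a per-instance "refresh_cache-<uuid>" lock, re-reads the port, and rewrites that VIF's entry in the instance network info cache, which is why the log ends with "Updated VIF entry in instance network info cache". A stdlib-only sketch of that pattern; the in-memory dicts and the "fetch_port" callable are hypothetical stand-ins for Nova's cache and the Neutron lookup.

    import threading

    _cache_locks = {}
    _nw_info_cache = {}     # instance_uuid -> {port_id: port_dict}

    def refresh_port(instance_uuid, port_id, fetch_port):
        lock = _cache_locks.setdefault(instance_uuid, threading.Lock())
        with lock:                          # plays the role of "refresh_cache-<uuid>"
            fresh = fetch_port(port_id)     # stand-in for the Neutron port query
            _nw_info_cache.setdefault(instance_uuid, {})[port_id] = fresh
            return fresh

    print(refresh_port("70ed3d3b-d436-49f3-8d17-ced3ada4d1e3",
                       "3ba21c65-0774-4217-9f6c-e8399b1f82db",
                       lambda pid: {"id": pid, "active": True}))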
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2143.682080] env[62510]: DEBUG nova.network.neutron [req-96e29c30-d38b-457a-a193-ad798d2021fc req-8eafc67f-b013-4daf-8a6d-623e08953518 service nova] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Updating instance_info_cache with network_info: [{"id": "3ba21c65-0774-4217-9f6c-e8399b1f82db", "address": "fa:16:3e:31:77:2e", "network": {"id": "e49618de-aacc-4b42-8a2e-7e2dc945a3b1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-883053645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b13a257970e4a9a9f9cfecaaf37d9da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ba21c65-07", "ovs_interfaceid": "3ba21c65-0774-4217-9f6c-e8399b1f82db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2143.767461] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 11490e72-b9a5-4e8e-86c4-300c594cd914] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2143.990315] env[62510]: DEBUG nova.compute.manager [None req-4070daa8-b24b-4c9a-9b21-0deeae66c980 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Start destroying the instance on the hypervisor. 
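"Start destroying the instance on the hypervisor" above kicks off the teardown sequence that the following records show step by step: power the VM off, unregister it from vCenter, then delete its datastore directory. The sketch below only illustrates that ordering and the best-effort flavor of it; the callbacks are hypothetical stand-ins for PowerOffVM_Task, UnregisterVM and DeleteDatastoreFile_Task, and the real driver's error handling is more nuanced than this.

    def destroy_instance(power_off, unregister, delete_files):
        steps = []
        for name, step in (("power off", power_off),
                           ("unregister", unregister),
                           ("delete files", delete_files)):
            try:
                step()
                steps.append(name)
            except Exception as exc:        # best effort: keep tearing down
                steps.append("%s failed: %s" % (name, exc))
        return steps

    print(destroy_instance(lambda: None, lambda: None, lambda: None))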
{{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2143.990523] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4070daa8-b24b-4c9a-9b21-0deeae66c980 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2143.991446] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8013e4c-3e9f-46da-8067-e3dbc4a94418 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.999410] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4070daa8-b24b-4c9a-9b21-0deeae66c980 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2143.999639] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2a4a7f03-0131-4bb0-a02a-975e6709a050 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.006905] env[62510]: DEBUG oslo_vmware.api [None req-4070daa8-b24b-4c9a-9b21-0deeae66c980 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 2144.006905] env[62510]: value = "task-1769901" [ 2144.006905] env[62510]: _type = "Task" [ 2144.006905] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2144.014904] env[62510]: DEBUG oslo_vmware.api [None req-4070daa8-b24b-4c9a-9b21-0deeae66c980 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769901, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2144.148229] env[62510]: DEBUG oslo_vmware.api [None req-61e88f08-f1fb-4bd9-968d-67823e321de3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769900, 'name': PowerOffVM_Task, 'duration_secs': 0.178076} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2144.148564] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-61e88f08-f1fb-4bd9-968d-67823e321de3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2144.148684] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-61e88f08-f1fb-4bd9-968d-67823e321de3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2144.148921] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c01c4c5c-3098-48c7-b01a-6a8629a2f99d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.184572] env[62510]: DEBUG oslo_concurrency.lockutils [req-96e29c30-d38b-457a-a193-ad798d2021fc req-8eafc67f-b013-4daf-8a6d-623e08953518 service nova] Releasing lock "refresh_cache-70ed3d3b-d436-49f3-8d17-ced3ada4d1e3" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2144.230252] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-61e88f08-f1fb-4bd9-968d-67823e321de3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2144.230521] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-61e88f08-f1fb-4bd9-968d-67823e321de3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2144.230725] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-61e88f08-f1fb-4bd9-968d-67823e321de3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Deleting the datastore file [datastore1] 9f3f72ba-60c9-48fb-917f-197e6fc8faef {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2144.230998] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b545ebc3-3192-45e7-a2df-5c0aa12fa6fd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.237566] env[62510]: DEBUG oslo_vmware.api [None req-61e88f08-f1fb-4bd9-968d-67823e321de3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for the task: (returnval){ [ 2144.237566] env[62510]: value = "task-1769903" [ 2144.237566] env[62510]: _type = "Task" [ 2144.237566] env[62510]: } to complete. 
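The file deletions above operate on datastore paths of the form "[datastore1] <dir>/<file>": a datastore name in brackets followed by a path relative to that datastore. A tiny parser/formatter for that notation, written for illustration and only loosely modelled on the helpers Nova uses; it is not the actual ds_util API.

    def parse_ds_path(ds_path):
        # "[datastore1] 9f3f72ba-..." -> ("datastore1", "9f3f72ba-...")
        assert ds_path.startswith("["), "expected '[datastore] relative/path'"
        name, _, rel = ds_path.partition("] ")
        return name[1:], rel

    def join_ds_path(datastore, *parts):
        return "[%s] %s" % (datastore, "/".join(parts))

    ds, rel = parse_ds_path("[datastore1] 9f3f72ba-60c9-48fb-917f-197e6fc8faef")
    print(ds, rel)
    print(join_ds_path(ds, rel))    # round-trips the directory being deleted above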
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2144.245328] env[62510]: DEBUG oslo_vmware.api [None req-61e88f08-f1fb-4bd9-968d-67823e321de3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769903, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2144.270911] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 14f08e1c-bf2a-4dca-9770-8ceb311130e3] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2144.517480] env[62510]: DEBUG oslo_vmware.api [None req-4070daa8-b24b-4c9a-9b21-0deeae66c980 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769901, 'name': PowerOffVM_Task, 'duration_secs': 0.193821} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2144.517686] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4070daa8-b24b-4c9a-9b21-0deeae66c980 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2144.517828] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4070daa8-b24b-4c9a-9b21-0deeae66c980 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2144.518090] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bab4bc18-87da-4cd1-86d7-0e318b399c24 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.592216] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4070daa8-b24b-4c9a-9b21-0deeae66c980 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2144.592429] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4070daa8-b24b-4c9a-9b21-0deeae66c980 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2144.592649] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-4070daa8-b24b-4c9a-9b21-0deeae66c980 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Deleting the datastore file [datastore1] ef20eba1-cb3a-4e0f-bbdb-54949e409546 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2144.593019] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c1f9f818-3023-4f3e-b68c-bf12acdd0f01 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
2144.599926] env[62510]: DEBUG oslo_vmware.api [None req-4070daa8-b24b-4c9a-9b21-0deeae66c980 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for the task: (returnval){ [ 2144.599926] env[62510]: value = "task-1769905" [ 2144.599926] env[62510]: _type = "Task" [ 2144.599926] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2144.607496] env[62510]: DEBUG oslo_vmware.api [None req-4070daa8-b24b-4c9a-9b21-0deeae66c980 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769905, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2144.747372] env[62510]: DEBUG oslo_vmware.api [None req-61e88f08-f1fb-4bd9-968d-67823e321de3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Task: {'id': task-1769903, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.130811} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2144.747616] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-61e88f08-f1fb-4bd9-968d-67823e321de3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2144.747798] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-61e88f08-f1fb-4bd9-968d-67823e321de3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2144.747967] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-61e88f08-f1fb-4bd9-968d-67823e321de3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2144.748155] env[62510]: INFO nova.compute.manager [None req-61e88f08-f1fb-4bd9-968d-67823e321de3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Took 1.13 seconds to destroy the instance on the hypervisor. [ 2144.748391] env[62510]: DEBUG oslo.service.loopingcall [None req-61e88f08-f1fb-4bd9-968d-67823e321de3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
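The "Waiting for function ... _deallocate_network_with_retries to return" records above wrap network deallocation in a retry loop so a transient Neutron failure does not leave ports behind. A sketch of that retry-with-backoff shape; the attempt count and delay are illustrative values, not Nova's configured ones, and "func" stands in for the deallocation call.

    import time

    def call_with_retries(func, max_attempts=3, base_delay=0.0):
        for attempt in range(1, max_attempts + 1):
            try:
                return func()
            except Exception:
                if attempt == max_attempts:
                    raise
                time.sleep(base_delay * attempt)   # grow the delay between attempts

    attempts = []
    def flaky():
        attempts.append(1)
        if len(attempts) < 2:
            raise RuntimeError("neutron briefly unavailable")
        return "deallocated"

    print(call_with_retries(flaky))   # -> deallocated on the second attempt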
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2144.748608] env[62510]: DEBUG nova.compute.manager [-] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2144.748709] env[62510]: DEBUG nova.network.neutron [-] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2144.773856] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: cb2e8f2c-7bd0-4f8f-baef-ac0cce062d74] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2145.110816] env[62510]: DEBUG oslo_vmware.api [None req-4070daa8-b24b-4c9a-9b21-0deeae66c980 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Task: {'id': task-1769905, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137755} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2145.111814] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-4070daa8-b24b-4c9a-9b21-0deeae66c980 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2145.111814] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4070daa8-b24b-4c9a-9b21-0deeae66c980 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2145.111814] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4070daa8-b24b-4c9a-9b21-0deeae66c980 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2145.111814] env[62510]: INFO nova.compute.manager [None req-4070daa8-b24b-4c9a-9b21-0deeae66c980 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Took 1.12 seconds to destroy the instance on the hypervisor. [ 2145.111814] env[62510]: DEBUG oslo.service.loopingcall [None req-4070daa8-b24b-4c9a-9b21-0deeae66c980 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2145.112032] env[62510]: DEBUG nova.compute.manager [-] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2145.112091] env[62510]: DEBUG nova.network.neutron [-] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2145.211573] env[62510]: DEBUG nova.compute.manager [req-c49938d1-d0b3-4134-8191-ae223854d048 req-751a0bed-3a3f-45e9-8f02-c49a81e9e803 service nova] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Received event network-vif-deleted-47759f10-ede2-4020-b8a8-36effea384c5 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2145.211809] env[62510]: INFO nova.compute.manager [req-c49938d1-d0b3-4134-8191-ae223854d048 req-751a0bed-3a3f-45e9-8f02-c49a81e9e803 service nova] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Neutron deleted interface 47759f10-ede2-4020-b8a8-36effea384c5; detaching it from the instance and deleting it from the info cache [ 2145.211920] env[62510]: DEBUG nova.network.neutron [req-c49938d1-d0b3-4134-8191-ae223854d048 req-751a0bed-3a3f-45e9-8f02-c49a81e9e803 service nova] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2145.277230] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: e9a2be27-eaea-41f4-aefd-fc15a9bf0e9c] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2145.537285] env[62510]: DEBUG nova.compute.manager [req-05529353-ab63-45ab-8b26-7931dfd91556 req-bc3f599e-4beb-47e5-9a4d-91a7be175b6c service nova] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Received event network-vif-deleted-14966b8d-39b1-4552-9912-c1897a3946b2 {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2145.537285] env[62510]: INFO nova.compute.manager [req-05529353-ab63-45ab-8b26-7931dfd91556 req-bc3f599e-4beb-47e5-9a4d-91a7be175b6c service nova] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Neutron deleted interface 14966b8d-39b1-4552-9912-c1897a3946b2; detaching it from the instance and deleting it from the info cache [ 2145.537285] env[62510]: DEBUG nova.network.neutron [req-05529353-ab63-45ab-8b26-7931dfd91556 req-bc3f599e-4beb-47e5-9a4d-91a7be175b6c service nova] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2145.689068] env[62510]: DEBUG nova.network.neutron [-] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2145.714395] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bb1f3b1c-1116-4c7c-b0a5-be3a91f2ef2c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.724851] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6a8b1e42-9922-4e7e-9e90-234de7bd86a0 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.750091] env[62510]: DEBUG nova.compute.manager [req-c49938d1-d0b3-4134-8191-ae223854d048 req-751a0bed-3a3f-45e9-8f02-c49a81e9e803 service nova] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Detach interface failed, port_id=47759f10-ede2-4020-b8a8-36effea384c5, reason: Instance 9f3f72ba-60c9-48fb-917f-197e6fc8faef could not be found. {{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 2145.781315] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 9956e5d2-edda-47af-a3df-743ebed1154b] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2146.016445] env[62510]: DEBUG nova.network.neutron [-] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2146.040378] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c2c2bd56-cc91-4cd8-8c5a-ffa611e7469b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.050832] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a48f049-dbf4-4b51-90d3-e2d2a174befd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.077709] env[62510]: DEBUG nova.compute.manager [req-05529353-ab63-45ab-8b26-7931dfd91556 req-bc3f599e-4beb-47e5-9a4d-91a7be175b6c service nova] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Detach interface failed, port_id=14966b8d-39b1-4552-9912-c1897a3946b2, reason: Instance ef20eba1-cb3a-4e0f-bbdb-54949e409546 could not be found. {{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 2146.191484] env[62510]: INFO nova.compute.manager [-] [instance: 9f3f72ba-60c9-48fb-917f-197e6fc8faef] Took 1.44 seconds to deallocate network for instance. [ 2146.284583] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 72f8492b-304a-4451-ab40-4cdfe36b9e19] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2146.519303] env[62510]: INFO nova.compute.manager [-] [instance: ef20eba1-cb3a-4e0f-bbdb-54949e409546] Took 1.41 seconds to deallocate network for instance. 
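The entries above trace the vmwareapi destroy path for instance ef20eba1-cb3a-4e0f-bbdb-54949e409546: a PowerOffVM_Task completes, the VM is unregistered, a DeleteDatastoreFile_Task is issued for the instance directory on datastore1 and polled until it reports success, and only then does network deallocation begin. The following is a minimal, self-contained sketch of that poll-until-done pattern; the class and function names here are hypothetical stand-ins, not the oslo.vmware implementation the log is produced by.

import time

# Hypothetical task states mirroring what the log reports
# ("progress is 0%." ... "completed successfully").
RUNNING, SUCCESS, ERROR = "running", "success", "error"


class FakeVSphereTask:
    """Stand-in for a vCenter task reference; finishes after a few polls."""

    def __init__(self, name, polls_until_done=3):
        self.name = name
        self._polls_left = polls_until_done
        self.progress = 0

    def poll(self):
        self._polls_left -= 1
        if self._polls_left <= 0:
            self.progress = 100
            return SUCCESS
        return RUNNING


def wait_for_task(task, interval=0.5, timeout=30):
    """Poll a task until it succeeds, fails, or the timeout expires."""
    deadline = time.monotonic() + timeout
    while True:
        state = task.poll()
        print("Task %s progress is %d%%." % (task.name, task.progress))
        if state == SUCCESS:
            print("Task %s completed successfully." % task.name)
            return
        if state == ERROR:
            raise RuntimeError("Task %s failed" % task.name)
        if time.monotonic() > deadline:
            raise TimeoutError("Task %s did not finish in time" % task.name)
        time.sleep(interval)


if __name__ == "__main__":
    # Teardown order mirrored from the log: power off, unregister,
    # then delete the instance directory from the datastore.
    for step in ("PowerOffVM_Task", "UnregisterVM", "DeleteDatastoreFile_Task"):
        wait_for_task(FakeVSphereTask(step))

The real driver blocks on each task in the same way before it reports "Instance destroyed" and hands off to network deallocation, which is why the "Took N seconds to destroy the instance on the hypervisor" lines only appear after the file-delete task finishes.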
[ 2146.697451] env[62510]: DEBUG oslo_concurrency.lockutils [None req-61e88f08-f1fb-4bd9-968d-67823e321de3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2146.697715] env[62510]: DEBUG oslo_concurrency.lockutils [None req-61e88f08-f1fb-4bd9-968d-67823e321de3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2146.697950] env[62510]: DEBUG nova.objects.instance [None req-61e88f08-f1fb-4bd9-968d-67823e321de3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lazy-loading 'resources' on Instance uuid 9f3f72ba-60c9-48fb-917f-197e6fc8faef {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2146.787533] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: c2be17de-175a-401f-8c53-f785aeecfff4] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2147.025980] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4070daa8-b24b-4c9a-9b21-0deeae66c980 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2147.251406] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6600d9e3-94f3-477d-b6be-6292c1c87a65 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.259277] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6a58f1c-4326-40bc-95c0-da3533372f4b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.289251] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fc35cea-c3e5-42c0-b419-adf8a1bd9099 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.291688] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 5cae60b1-c0b1-4ff4-baf9-b8d1885614e8] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2147.298380] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3a68a70-e8c3-4351-9056-7af9799d9c5f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.311195] env[62510]: DEBUG nova.compute.provider_tree [None req-61e88f08-f1fb-4bd9-968d-67823e321de3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Inventory has not changed in ProviderTree for provider: 
c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2147.794701] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 0c93a909-d08f-466c-bdef-a26fa35cd944] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2147.813851] env[62510]: DEBUG nova.scheduler.client.report [None req-61e88f08-f1fb-4bd9-968d-67823e321de3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2148.298265] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 82dceacf-1898-4d86-b1c6-552a24ab565f] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2148.320331] env[62510]: DEBUG oslo_concurrency.lockutils [None req-61e88f08-f1fb-4bd9-968d-67823e321de3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.622s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2148.322681] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4070daa8-b24b-4c9a-9b21-0deeae66c980 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.297s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2148.322924] env[62510]: DEBUG nova.objects.instance [None req-4070daa8-b24b-4c9a-9b21-0deeae66c980 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lazy-loading 'resources' on Instance uuid ef20eba1-cb3a-4e0f-bbdb-54949e409546 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2148.340158] env[62510]: INFO nova.scheduler.client.report [None req-61e88f08-f1fb-4bd9-968d-67823e321de3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Deleted allocations for instance 9f3f72ba-60c9-48fb-917f-197e6fc8faef [ 2148.802056] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 5cd4af1d-d1e4-4da4-a9fc-ba191e2e124c] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2148.846980] env[62510]: DEBUG oslo_concurrency.lockutils [None req-61e88f08-f1fb-4bd9-968d-67823e321de3 tempest-ServerActionsTestOtherB-1185268283 tempest-ServerActionsTestOtherB-1185268283-project-member] Lock "9f3f72ba-60c9-48fb-917f-197e6fc8faef" "released" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.734s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2148.861740] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c4ecc8d-141c-4b90-b014-e8a2248a41a2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.870520] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70685cb4-1da0-4766-babe-5f2c8d18b01a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.900887] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82812841-53b0-42b0-9395-424904071c15 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.908260] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05d1de26-44e0-45d1-9680-0dadb4a9f84f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.921500] env[62510]: DEBUG nova.compute.provider_tree [None req-4070daa8-b24b-4c9a-9b21-0deeae66c980 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2149.304874] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 0f6e9363-47ac-481e-bc1c-b8f4f9748d9c] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2149.424968] env[62510]: DEBUG nova.scheduler.client.report [None req-4070daa8-b24b-4c9a-9b21-0deeae66c980 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2149.809662] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 92cb4e54-a00e-4974-b134-22d302932e32] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2149.930184] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4070daa8-b24b-4c9a-9b21-0deeae66c980 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.607s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2149.946545] env[62510]: INFO nova.scheduler.client.report [None req-4070daa8-b24b-4c9a-9b21-0deeae66c980 
tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Deleted allocations for instance ef20eba1-cb3a-4e0f-bbdb-54949e409546 [ 2150.313070] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 6ce4ced2-7a6c-48c9-bfd9-452352b4a7e3] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2150.454864] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4070daa8-b24b-4c9a-9b21-0deeae66c980 tempest-AttachVolumeNegativeTest-146397362 tempest-AttachVolumeNegativeTest-146397362-project-member] Lock "ef20eba1-cb3a-4e0f-bbdb-54949e409546" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.971s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2150.816746] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 5f229f78-6c5d-4170-bdd4-c5522b137949] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2151.320012] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 2e24b76d-a770-4f1e-a8f1-a54417f1be81] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2151.823952] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: e7e053be-cb88-4ae0-b157-3006211f77d9] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2152.327198] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 01204162-bf8e-46e0-bcf4-00df9ed7e7ce] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2152.832067] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: ebd2dc4b-8d74-47db-861e-870d41a4150b] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2153.335152] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: cf4160a8-1160-45fc-b9e5-e9526b6c1506] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2153.839379] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 9fe592c1-e23a-46d5-8952-c181709d93e7] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2154.344322] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 1e3e2044-a072-454f-85ba-5cb0bc36b5fd] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2154.848054] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 31772dc9-4f04-42df-9e3b-3200cc72c977] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2155.350683] env[62510]: DEBUG nova.compute.manager [None 
req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 8a230335-6388-45fb-a29e-9e63ddb4d5f2] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2155.853939] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 2f7b02e8-f658-448f-b6e6-9bfa94c74da4] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2156.357758] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 91a76cc7-7f82-42cf-a379-fc0ba3d04568] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2156.861132] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: bc474f8b-dd3b-4d7a-a8e0-fea5570b3091] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2157.366630] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: c8e69231-2786-47ac-9a44-c194088b8079] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2157.870386] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 841460b0-d917-44ea-88c6-0e5a3022f658] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2158.374492] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 241d842d-3dd5-4ac2-a18a-12b9c9fbd340] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2158.878894] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: f4c5c0ab-2e6d-4ecd-a8e7-1f8a8c0a1095] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2159.383581] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 4e735bb6-f167-4c2b-b44e-d2dd3040603d] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2159.887951] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 8ffa27e9-6a3b-48d1-aed4-c808089788d9] Instance has had 0 of 5 cleanup attempts {{(pid=62510) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11622}} [ 2160.390819] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2177.012357] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Acquiring lock "2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62510) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2177.012661] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Lock "2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2177.515600] env[62510]: DEBUG nova.compute.manager [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Starting instance... {{(pid=62510) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2178.153438] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2178.153719] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2178.155285] env[62510]: INFO nova.compute.claims [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2178.640089] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2fdd665f-bff0-495f-92a0-442be6da58f6 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "70ed3d3b-d436-49f3-8d17-ced3ada4d1e3" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2178.640418] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2fdd665f-bff0-495f-92a0-442be6da58f6 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "70ed3d3b-d436-49f3-8d17-ced3ada4d1e3" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2178.640657] env[62510]: DEBUG nova.compute.manager [None req-2fdd665f-bff0-495f-92a0-442be6da58f6 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2178.642111] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea563587-ed38-4373-abca-23fe1254c095 {{(pid=62510) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.650392] env[62510]: DEBUG nova.compute.manager [None req-2fdd665f-bff0-495f-92a0-442be6da58f6 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62510) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 2178.651062] env[62510]: DEBUG nova.objects.instance [None req-2fdd665f-bff0-495f-92a0-442be6da58f6 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lazy-loading 'flavor' on Instance uuid 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2179.197992] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca5ba506-7dfd-4a63-91e3-7712af767e8e {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.206202] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a0a107b-c7a5-4ee3-be58-875cda5a87d8 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.238099] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e9a9a74-27b7-42a6-8179-4819171ebcf7 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.245715] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f95d9f57-2fb4-4396-ab52-01de23c41115 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.259110] env[62510]: DEBUG nova.compute.provider_tree [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2179.657840] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fdd665f-bff0-495f-92a0-442be6da58f6 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2179.658188] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-90e0abb4-307b-45ae-980f-29a609ef4cbc {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.666752] env[62510]: DEBUG oslo_vmware.api [None req-2fdd665f-bff0-495f-92a0-442be6da58f6 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2179.666752] env[62510]: value = "task-1769920" [ 2179.666752] env[62510]: _type = "Task" [ 2179.666752] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2179.674852] env[62510]: DEBUG oslo_vmware.api [None req-2fdd665f-bff0-495f-92a0-442be6da58f6 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769920, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2179.762130] env[62510]: DEBUG nova.scheduler.client.report [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2180.176948] env[62510]: DEBUG oslo_vmware.api [None req-2fdd665f-bff0-495f-92a0-442be6da58f6 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769920, 'name': PowerOffVM_Task, 'duration_secs': 0.209078} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2180.177255] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fdd665f-bff0-495f-92a0-442be6da58f6 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2180.177494] env[62510]: DEBUG nova.compute.manager [None req-2fdd665f-bff0-495f-92a0-442be6da58f6 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2180.178254] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3321595-111d-414e-a0a2-0835862cef7b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.267109] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.113s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2180.267567] env[62510]: DEBUG nova.compute.manager [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Start building networks asynchronously for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2180.689122] env[62510]: DEBUG oslo_concurrency.lockutils [None req-2fdd665f-bff0-495f-92a0-442be6da58f6 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "70ed3d3b-d436-49f3-8d17-ced3ada4d1e3" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.049s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2180.772324] env[62510]: DEBUG nova.compute.utils [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Using /dev/sd instead of None {{(pid=62510) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2180.773670] env[62510]: DEBUG nova.compute.manager [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Allocating IP information in the background. {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2180.773826] env[62510]: DEBUG nova.network.neutron [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] allocate_for_instance() {{(pid=62510) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2180.820016] env[62510]: DEBUG nova.policy [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9ccdf9be5f854ccea69ca77f0a7f39a7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a586986a0bdb4a788194717def216857', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62510) authorize /opt/stack/nova/nova/policy.py:192}} [ 2181.083646] env[62510]: DEBUG nova.network.neutron [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Successfully created port: b4dee9ea-d202-428e-990c-597329ee2a4c {{(pid=62510) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2181.097297] env[62510]: DEBUG nova.objects.instance [None req-8b092aef-c566-4a4e-b5dd-8e3112aa095e tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lazy-loading 'flavor' on Instance uuid 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2181.277603] env[62510]: DEBUG nova.compute.manager [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Start building block device mappings for instance. 
{{(pid=62510) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2181.601728] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8b092aef-c566-4a4e-b5dd-8e3112aa095e tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "refresh_cache-70ed3d3b-d436-49f3-8d17-ced3ada4d1e3" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2181.601899] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8b092aef-c566-4a4e-b5dd-8e3112aa095e tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquired lock "refresh_cache-70ed3d3b-d436-49f3-8d17-ced3ada4d1e3" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2181.602111] env[62510]: DEBUG nova.network.neutron [None req-8b092aef-c566-4a4e-b5dd-8e3112aa095e tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2181.602300] env[62510]: DEBUG nova.objects.instance [None req-8b092aef-c566-4a4e-b5dd-8e3112aa095e tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lazy-loading 'info_cache' on Instance uuid 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2181.782536] env[62510]: INFO nova.virt.block_device [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Booting with volume cf2413e0-72a8-444b-ab18-1314e253a7ff at /dev/sda [ 2181.818293] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2226bcdc-b876-47a5-b964-6592b6d53b67 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.828211] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffb860be-33b0-4e63-a498-c0bf07f40eed {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.853434] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-848b0ae6-817f-494e-88ef-776377701027 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.862100] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80a7b1e0-2805-4e52-92b3-bf714f313b67 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.887534] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0379b77-c6dd-482c-bf07-cb87498542cd {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.894066] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f178a34-1792-4cdd-992b-0d9ec3f19d35 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.907767] env[62510]: DEBUG nova.virt.block_device [None 
req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Updating existing volume attachment record: 305cf287-fe99-4d8b-996a-f036c5990dba {{(pid=62510) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2182.105358] env[62510]: DEBUG nova.objects.base [None req-8b092aef-c566-4a4e-b5dd-8e3112aa095e tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Object Instance<70ed3d3b-d436-49f3-8d17-ced3ada4d1e3> lazy-loaded attributes: flavor,info_cache {{(pid=62510) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2182.455083] env[62510]: DEBUG nova.compute.manager [req-e57263f9-af6f-417c-bae0-8105b3fe49ec req-b556ddfb-7401-4ecc-8db9-d473db47303c service nova] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Received event network-vif-plugged-b4dee9ea-d202-428e-990c-597329ee2a4c {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2182.455917] env[62510]: DEBUG oslo_concurrency.lockutils [req-e57263f9-af6f-417c-bae0-8105b3fe49ec req-b556ddfb-7401-4ecc-8db9-d473db47303c service nova] Acquiring lock "2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2182.456464] env[62510]: DEBUG oslo_concurrency.lockutils [req-e57263f9-af6f-417c-bae0-8105b3fe49ec req-b556ddfb-7401-4ecc-8db9-d473db47303c service nova] Lock "2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2182.456907] env[62510]: DEBUG oslo_concurrency.lockutils [req-e57263f9-af6f-417c-bae0-8105b3fe49ec req-b556ddfb-7401-4ecc-8db9-d473db47303c service nova] Lock "2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2182.457802] env[62510]: DEBUG nova.compute.manager [req-e57263f9-af6f-417c-bae0-8105b3fe49ec req-b556ddfb-7401-4ecc-8db9-d473db47303c service nova] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] No waiting events found dispatching network-vif-plugged-b4dee9ea-d202-428e-990c-597329ee2a4c {{(pid=62510) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2182.458281] env[62510]: WARNING nova.compute.manager [req-e57263f9-af6f-417c-bae0-8105b3fe49ec req-b556ddfb-7401-4ecc-8db9-d473db47303c service nova] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Received unexpected event network-vif-plugged-b4dee9ea-d202-428e-990c-597329ee2a4c for instance with vm_state building and task_state block_device_mapping. 
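The build path above is serialized by named locks: first the per-instance lock taken for _locked_do_build_and_run_instance, then "compute_resources" while the resource tracker makes the claim, with the log recording how long each caller waited and held the lock. The sketch below imitates that wait/hold reporting with plain threading primitives; it is an illustration of what those lockutils lines mean, assuming nothing about the actual oslo.concurrency internals.

import threading
import time
from contextlib import contextmanager

# One process-wide registry of named locks, loosely mirroring how the
# "compute_resources" and per-instance locks in the log serialize work.
_locks = {}
_registry_guard = threading.Lock()


def _get_lock(name):
    with _registry_guard:
        return _locks.setdefault(name, threading.Lock())


@contextmanager
def named_lock(name, caller):
    """Acquire a named lock and report wait/hold times like the log does."""
    lock = _get_lock(name)
    print('Acquiring lock "%s" by "%s"' % (name, caller))
    started = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - started
    print('Lock "%s" acquired by "%s" :: waited %.3fs' % (name, caller, waited))
    held_from = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - held_from
        print('Lock "%s" "released" by "%s" :: held %.3fs' % (name, caller, held))


if __name__ == "__main__":
    # The build path in the log first serializes on the instance UUID,
    # then on "compute_resources" while the resource claim is made.
    with named_lock("2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c",
                    "_locked_do_build_and_run_instance"):
        with named_lock("compute_resources", "instance_claim"):
            time.sleep(0.1)  # placeholder for the actual claim work

Contention shows up directly in these numbers: the earlier "waited 1.297s" on "compute_resources" is one request sitting in acquire() while another holds the lock for its usage update.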
[ 2182.537329] env[62510]: DEBUG nova.network.neutron [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Successfully updated port: b4dee9ea-d202-428e-990c-597329ee2a4c {{(pid=62510) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2182.802238] env[62510]: DEBUG nova.network.neutron [None req-8b092aef-c566-4a4e-b5dd-8e3112aa095e tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Updating instance_info_cache with network_info: [{"id": "3ba21c65-0774-4217-9f6c-e8399b1f82db", "address": "fa:16:3e:31:77:2e", "network": {"id": "e49618de-aacc-4b42-8a2e-7e2dc945a3b1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-883053645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b13a257970e4a9a9f9cfecaaf37d9da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ba21c65-07", "ovs_interfaceid": "3ba21c65-0774-4217-9f6c-e8399b1f82db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2183.038704] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Acquiring lock "refresh_cache-2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2183.038888] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Acquired lock "refresh_cache-2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2183.039063] env[62510]: DEBUG nova.network.neutron [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2183.305475] env[62510]: DEBUG oslo_concurrency.lockutils [None req-8b092aef-c566-4a4e-b5dd-8e3112aa095e tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Releasing lock "refresh_cache-70ed3d3b-d436-49f3-8d17-ced3ada4d1e3" {{(pid=62510) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2183.570530] env[62510]: DEBUG nova.network.neutron [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Instance cache missing network info. {{(pid=62510) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2183.700843] env[62510]: DEBUG nova.network.neutron [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Updating instance_info_cache with network_info: [{"id": "b4dee9ea-d202-428e-990c-597329ee2a4c", "address": "fa:16:3e:76:5f:b3", "network": {"id": "fe0b5b62-8984-4257-bf11-3f67dbad223f", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-352819536-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a586986a0bdb4a788194717def216857", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "03ac2c9c-6ad2-4a85-bfab-c7e336df859a", "external-id": "nsx-vlan-transportzone-379", "segmentation_id": 379, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4dee9ea-d2", "ovs_interfaceid": "b4dee9ea-d202-428e-990c-597329ee2a4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2183.999641] env[62510]: DEBUG nova.compute.manager [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Start spawning the instance on the hypervisor. 
{{(pid=62510) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2183.999641] env[62510]: DEBUG nova.virt.hardware [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T19:20:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2183.999641] env[62510]: DEBUG nova.virt.hardware [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Flavor limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2183.999641] env[62510]: DEBUG nova.virt.hardware [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Image limits 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2183.999906] env[62510]: DEBUG nova.virt.hardware [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Flavor pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2183.999906] env[62510]: DEBUG nova.virt.hardware [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Image pref 0:0:0 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2184.000014] env[62510]: DEBUG nova.virt.hardware [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62510) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2184.000233] env[62510]: DEBUG nova.virt.hardware [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2184.000387] env[62510]: DEBUG nova.virt.hardware [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2184.000501] env[62510]: DEBUG nova.virt.hardware [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Got 1 
possible topologies {{(pid=62510) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2184.000669] env[62510]: DEBUG nova.virt.hardware [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2184.000843] env[62510]: DEBUG nova.virt.hardware [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62510) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2184.001767] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af3f76cf-91ec-4a69-ae32-4ad5904e3c1d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.010230] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5526a1fa-7289-4bac-ac7b-dca2d402455d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.203735] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Releasing lock "refresh_cache-2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2184.204154] env[62510]: DEBUG nova.compute.manager [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Instance network_info: |[{"id": "b4dee9ea-d202-428e-990c-597329ee2a4c", "address": "fa:16:3e:76:5f:b3", "network": {"id": "fe0b5b62-8984-4257-bf11-3f67dbad223f", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-352819536-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a586986a0bdb4a788194717def216857", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "03ac2c9c-6ad2-4a85-bfab-c7e336df859a", "external-id": "nsx-vlan-transportzone-379", "segmentation_id": 379, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4dee9ea-d2", "ovs_interfaceid": "b4dee9ea-d202-428e-990c-597329ee2a4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62510) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2184.204613] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 
2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:76:5f:b3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '03ac2c9c-6ad2-4a85-bfab-c7e336df859a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b4dee9ea-d202-428e-990c-597329ee2a4c', 'vif_model': 'vmxnet3'}] {{(pid=62510) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2184.212420] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Creating folder: Project (a586986a0bdb4a788194717def216857). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2184.212709] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5c68cd20-20d8-47c8-b918-4db3c66ca299 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.226734] env[62510]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 2184.226929] env[62510]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62510) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 2184.227375] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Folder already exists: Project (a586986a0bdb4a788194717def216857). Parent ref: group-v367197. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 2184.227629] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Creating folder: Instances. Parent ref: group-v367525. {{(pid=62510) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2184.227903] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0a32b13f-c8b5-4f51-978b-641c89fe2f10 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.239013] env[62510]: INFO nova.virt.vmwareapi.vm_util [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Created folder: Instances in parent group-v367525. [ 2184.239257] env[62510]: DEBUG oslo.service.loopingcall [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2184.239455] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Creating VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2184.239658] env[62510]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9327998a-2bd8-42d4-adcb-48fd7b9cd2ef {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.258225] env[62510]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2184.258225] env[62510]: value = "task-1769923" [ 2184.258225] env[62510]: _type = "Task" [ 2184.258225] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2184.265982] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769923, 'name': CreateVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2184.311916] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b092aef-c566-4a4e-b5dd-8e3112aa095e tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2184.312330] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fa0d5e6d-4c9c-4f65-bc64-9b551f40a510 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.320206] env[62510]: DEBUG oslo_vmware.api [None req-8b092aef-c566-4a4e-b5dd-8e3112aa095e tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2184.320206] env[62510]: value = "task-1769924" [ 2184.320206] env[62510]: _type = "Task" [ 2184.320206] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2184.330192] env[62510]: DEBUG oslo_vmware.api [None req-8b092aef-c566-4a4e-b5dd-8e3112aa095e tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769924, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2184.476176] env[62510]: DEBUG nova.compute.manager [req-69732764-a0a3-41f3-ba64-096a929e95d7 req-4fb9506e-09ce-4ee0-baf2-8ae932f87d08 service nova] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Received event network-changed-b4dee9ea-d202-428e-990c-597329ee2a4c {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2184.476349] env[62510]: DEBUG nova.compute.manager [req-69732764-a0a3-41f3-ba64-096a929e95d7 req-4fb9506e-09ce-4ee0-baf2-8ae932f87d08 service nova] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Refreshing instance network info cache due to event network-changed-b4dee9ea-d202-428e-990c-597329ee2a4c. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 2184.476569] env[62510]: DEBUG oslo_concurrency.lockutils [req-69732764-a0a3-41f3-ba64-096a929e95d7 req-4fb9506e-09ce-4ee0-baf2-8ae932f87d08 service nova] Acquiring lock "refresh_cache-2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2184.476715] env[62510]: DEBUG oslo_concurrency.lockutils [req-69732764-a0a3-41f3-ba64-096a929e95d7 req-4fb9506e-09ce-4ee0-baf2-8ae932f87d08 service nova] Acquired lock "refresh_cache-2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2184.476898] env[62510]: DEBUG nova.network.neutron [req-69732764-a0a3-41f3-ba64-096a929e95d7 req-4fb9506e-09ce-4ee0-baf2-8ae932f87d08 service nova] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Refreshing network info cache for port b4dee9ea-d202-428e-990c-597329ee2a4c {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2184.767639] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769923, 'name': CreateVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2184.831690] env[62510]: DEBUG oslo_vmware.api [None req-8b092aef-c566-4a4e-b5dd-8e3112aa095e tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769924, 'name': PowerOnVM_Task, 'duration_secs': 0.414463} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2184.831990] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b092aef-c566-4a4e-b5dd-8e3112aa095e tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2184.832216] env[62510]: DEBUG nova.compute.manager [None req-8b092aef-c566-4a4e-b5dd-8e3112aa095e tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2184.832982] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2927a0b8-558f-4534-aa3c-360e72d9864d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.156739] env[62510]: DEBUG nova.network.neutron [req-69732764-a0a3-41f3-ba64-096a929e95d7 req-4fb9506e-09ce-4ee0-baf2-8ae932f87d08 service nova] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Updated VIF entry in instance network info cache for port b4dee9ea-d202-428e-990c-597329ee2a4c. 
{{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2185.157147] env[62510]: DEBUG nova.network.neutron [req-69732764-a0a3-41f3-ba64-096a929e95d7 req-4fb9506e-09ce-4ee0-baf2-8ae932f87d08 service nova] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Updating instance_info_cache with network_info: [{"id": "b4dee9ea-d202-428e-990c-597329ee2a4c", "address": "fa:16:3e:76:5f:b3", "network": {"id": "fe0b5b62-8984-4257-bf11-3f67dbad223f", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-352819536-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a586986a0bdb4a788194717def216857", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "03ac2c9c-6ad2-4a85-bfab-c7e336df859a", "external-id": "nsx-vlan-transportzone-379", "segmentation_id": 379, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4dee9ea-d2", "ovs_interfaceid": "b4dee9ea-d202-428e-990c-597329ee2a4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2185.268901] env[62510]: DEBUG oslo_vmware.api [-] Task: {'id': task-1769923, 'name': CreateVM_Task, 'duration_secs': 0.751695} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2185.269111] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Created VM on the ESX host {{(pid=62510) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2185.269783] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367528', 'volume_id': 'cf2413e0-72a8-444b-ab18-1314e253a7ff', 'name': 'volume-cf2413e0-72a8-444b-ab18-1314e253a7ff', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c', 'attached_at': '', 'detached_at': '', 'volume_id': 'cf2413e0-72a8-444b-ab18-1314e253a7ff', 'serial': 'cf2413e0-72a8-444b-ab18-1314e253a7ff'}, 'attachment_id': '305cf287-fe99-4d8b-996a-f036c5990dba', 'mount_device': '/dev/sda', 'device_type': None, 'disk_bus': None, 'guest_format': None, 'boot_index': 0, 'delete_on_termination': True, 'volume_type': None}], 'swap': None} {{(pid=62510) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 2185.269994] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Root volume 
attach. Driver type: vmdk {{(pid=62510) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 2185.270790] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2fb5f2e-fef5-4a8f-8de6-abfbf8e6c8cb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.278925] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fdc81c9-ac3f-48b7-b937-da0f156eab2f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.284961] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-671d8335-ccb2-4054-87c9-1dfaa398c489 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.290827] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-4c243eb1-110e-4358-9296-bef8d8cdfa5b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.298115] env[62510]: DEBUG oslo_vmware.api [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Waiting for the task: (returnval){ [ 2185.298115] env[62510]: value = "task-1769925" [ 2185.298115] env[62510]: _type = "Task" [ 2185.298115] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2185.308784] env[62510]: DEBUG oslo_vmware.api [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Task: {'id': task-1769925, 'name': RelocateVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2185.659915] env[62510]: DEBUG oslo_concurrency.lockutils [req-69732764-a0a3-41f3-ba64-096a929e95d7 req-4fb9506e-09ce-4ee0-baf2-8ae932f87d08 service nova] Releasing lock "refresh_cache-2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2185.808043] env[62510]: DEBUG oslo_vmware.api [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Task: {'id': task-1769925, 'name': RelocateVM_Task, 'duration_secs': 0.350494} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2185.808373] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Volume attach. 
Driver type: vmdk {{(pid=62510) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2185.808545] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367528', 'volume_id': 'cf2413e0-72a8-444b-ab18-1314e253a7ff', 'name': 'volume-cf2413e0-72a8-444b-ab18-1314e253a7ff', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c', 'attached_at': '', 'detached_at': '', 'volume_id': 'cf2413e0-72a8-444b-ab18-1314e253a7ff', 'serial': 'cf2413e0-72a8-444b-ab18-1314e253a7ff'} {{(pid=62510) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2185.809313] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d20f875e-6276-4923-9bcd-001bfeff5c17 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.824442] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6b764c9-f07e-4bae-bc03-ddbc0061d54d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.846079] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Reconfiguring VM instance instance-0000007f to attach disk [datastore1] volume-cf2413e0-72a8-444b-ab18-1314e253a7ff/volume-cf2413e0-72a8-444b-ab18-1314e253a7ff.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2185.846629] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b5a26ef0-5f48-4fa5-aa83-60a8720b834b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.866532] env[62510]: DEBUG oslo_vmware.api [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Waiting for the task: (returnval){ [ 2185.866532] env[62510]: value = "task-1769926" [ 2185.866532] env[62510]: _type = "Task" [ 2185.866532] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2185.873805] env[62510]: DEBUG oslo_vmware.api [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Task: {'id': task-1769926, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2186.216494] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76576900-e677-4271-8dcd-d4e6f50e35e6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.225088] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ea0c78b9-e8fc-4ab9-ba86-ea14f79f6418 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Suspending the VM {{(pid=62510) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 2186.225374] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-8e7fa302-859b-4538-bc98-ed711efe26df {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.232562] env[62510]: DEBUG oslo_vmware.api [None req-ea0c78b9-e8fc-4ab9-ba86-ea14f79f6418 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2186.232562] env[62510]: value = "task-1769927" [ 2186.232562] env[62510]: _type = "Task" [ 2186.232562] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2186.240574] env[62510]: DEBUG oslo_vmware.api [None req-ea0c78b9-e8fc-4ab9-ba86-ea14f79f6418 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769927, 'name': SuspendVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2186.376648] env[62510]: DEBUG oslo_vmware.api [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Task: {'id': task-1769926, 'name': ReconfigVM_Task, 'duration_secs': 0.289006} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2186.376958] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Reconfigured VM instance instance-0000007f to attach disk [datastore1] volume-cf2413e0-72a8-444b-ab18-1314e253a7ff/volume-cf2413e0-72a8-444b-ab18-1314e253a7ff.vmdk or device None with type thin {{(pid=62510) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2186.381795] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ba4bfdc3-bae1-4d4f-a107-7f3bbcc0c834 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.398876] env[62510]: DEBUG oslo_vmware.api [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Waiting for the task: (returnval){ [ 2186.398876] env[62510]: value = "task-1769928" [ 2186.398876] env[62510]: _type = "Task" [ 2186.398876] env[62510]: } to complete. 
{{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2186.408502] env[62510]: DEBUG oslo_vmware.api [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Task: {'id': task-1769928, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2186.742920] env[62510]: DEBUG oslo_vmware.api [None req-ea0c78b9-e8fc-4ab9-ba86-ea14f79f6418 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769927, 'name': SuspendVM_Task} progress is 66%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2186.908472] env[62510]: DEBUG oslo_vmware.api [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Task: {'id': task-1769928, 'name': ReconfigVM_Task, 'duration_secs': 0.196664} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2186.909985] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367528', 'volume_id': 'cf2413e0-72a8-444b-ab18-1314e253a7ff', 'name': 'volume-cf2413e0-72a8-444b-ab18-1314e253a7ff', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c', 'attached_at': '', 'detached_at': '', 'volume_id': 'cf2413e0-72a8-444b-ab18-1314e253a7ff', 'serial': 'cf2413e0-72a8-444b-ab18-1314e253a7ff'} {{(pid=62510) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2186.909985] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-45a6d7f0-cd60-49a0-9601-671fada276ed {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2186.916278] env[62510]: DEBUG oslo_vmware.api [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Waiting for the task: (returnval){ [ 2186.916278] env[62510]: value = "task-1769929" [ 2186.916278] env[62510]: _type = "Task" [ 2186.916278] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2186.923628] env[62510]: DEBUG oslo_vmware.api [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Task: {'id': task-1769929, 'name': Rename_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2187.244094] env[62510]: DEBUG oslo_vmware.api [None req-ea0c78b9-e8fc-4ab9-ba86-ea14f79f6418 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769927, 'name': SuspendVM_Task, 'duration_secs': 0.618969} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2187.244366] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-ea0c78b9-e8fc-4ab9-ba86-ea14f79f6418 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Suspended the VM {{(pid=62510) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 2187.244576] env[62510]: DEBUG nova.compute.manager [None req-ea0c78b9-e8fc-4ab9-ba86-ea14f79f6418 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2187.245305] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e5e7806-0cb4-4c59-84f1-cfce7abd201c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.426822] env[62510]: DEBUG oslo_vmware.api [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Task: {'id': task-1769929, 'name': Rename_Task, 'duration_secs': 0.134716} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2187.427106] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Powering on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2187.427352] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0523cc17-8610-49b3-802c-547e8898d8fa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.434386] env[62510]: DEBUG oslo_vmware.api [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Waiting for the task: (returnval){ [ 2187.434386] env[62510]: value = "task-1769930" [ 2187.434386] env[62510]: _type = "Task" [ 2187.434386] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2187.442214] env[62510]: DEBUG oslo_vmware.api [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Task: {'id': task-1769930, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2187.945260] env[62510]: DEBUG oslo_vmware.api [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Task: {'id': task-1769930, 'name': PowerOnVM_Task, 'duration_secs': 0.488999} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2187.945617] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Powered on the VM {{(pid=62510) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2187.945932] env[62510]: INFO nova.compute.manager [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Took 3.95 seconds to spawn the instance on the hypervisor. [ 2187.945932] env[62510]: DEBUG nova.compute.manager [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2187.946690] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf2b7546-0fbb-4379-8588-006492b68793 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.462711] env[62510]: INFO nova.compute.manager [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Took 10.44 seconds to build instance. [ 2188.557624] env[62510]: INFO nova.compute.manager [None req-a51b2ea9-4570-4300-bd61-59b47291a395 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Resuming [ 2188.557863] env[62510]: DEBUG nova.objects.instance [None req-a51b2ea9-4570-4300-bd61-59b47291a395 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lazy-loading 'flavor' on Instance uuid 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2188.965254] env[62510]: DEBUG oslo_concurrency.lockutils [None req-4abd1479-dabc-4480-bd78-bf36b3940cdd tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Lock "2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 11.952s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2189.094499] env[62510]: DEBUG nova.compute.manager [req-549e0e1d-4964-4875-9e89-c2a17c2a3923 req-1804efde-5768-4fd5-9e2c-b4afecfade6d service nova] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Received event network-changed-b4dee9ea-d202-428e-990c-597329ee2a4c {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2189.094661] env[62510]: DEBUG nova.compute.manager [req-549e0e1d-4964-4875-9e89-c2a17c2a3923 req-1804efde-5768-4fd5-9e2c-b4afecfade6d service nova] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Refreshing instance network info cache due to event network-changed-b4dee9ea-d202-428e-990c-597329ee2a4c. 
{{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11517}} [ 2189.094887] env[62510]: DEBUG oslo_concurrency.lockutils [req-549e0e1d-4964-4875-9e89-c2a17c2a3923 req-1804efde-5768-4fd5-9e2c-b4afecfade6d service nova] Acquiring lock "refresh_cache-2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2189.095018] env[62510]: DEBUG oslo_concurrency.lockutils [req-549e0e1d-4964-4875-9e89-c2a17c2a3923 req-1804efde-5768-4fd5-9e2c-b4afecfade6d service nova] Acquired lock "refresh_cache-2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2189.095274] env[62510]: DEBUG nova.network.neutron [req-549e0e1d-4964-4875-9e89-c2a17c2a3923 req-1804efde-5768-4fd5-9e2c-b4afecfade6d service nova] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Refreshing network info cache for port b4dee9ea-d202-428e-990c-597329ee2a4c {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2189.793754] env[62510]: DEBUG nova.network.neutron [req-549e0e1d-4964-4875-9e89-c2a17c2a3923 req-1804efde-5768-4fd5-9e2c-b4afecfade6d service nova] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Updated VIF entry in instance network info cache for port b4dee9ea-d202-428e-990c-597329ee2a4c. {{(pid=62510) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2189.794169] env[62510]: DEBUG nova.network.neutron [req-549e0e1d-4964-4875-9e89-c2a17c2a3923 req-1804efde-5768-4fd5-9e2c-b4afecfade6d service nova] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Updating instance_info_cache with network_info: [{"id": "b4dee9ea-d202-428e-990c-597329ee2a4c", "address": "fa:16:3e:76:5f:b3", "network": {"id": "fe0b5b62-8984-4257-bf11-3f67dbad223f", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-352819536-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a586986a0bdb4a788194717def216857", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "03ac2c9c-6ad2-4a85-bfab-c7e336df859a", "external-id": "nsx-vlan-transportzone-379", "segmentation_id": 379, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4dee9ea-d2", "ovs_interfaceid": "b4dee9ea-d202-428e-990c-597329ee2a4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2190.069344] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a51b2ea9-4570-4300-bd61-59b47291a395 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "refresh_cache-70ed3d3b-d436-49f3-8d17-ced3ada4d1e3" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2190.069688] env[62510]: DEBUG oslo_concurrency.lockutils [None 
req-a51b2ea9-4570-4300-bd61-59b47291a395 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquired lock "refresh_cache-70ed3d3b-d436-49f3-8d17-ced3ada4d1e3" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2190.069727] env[62510]: DEBUG nova.network.neutron [None req-a51b2ea9-4570-4300-bd61-59b47291a395 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Building network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2190.297069] env[62510]: DEBUG oslo_concurrency.lockutils [req-549e0e1d-4964-4875-9e89-c2a17c2a3923 req-1804efde-5768-4fd5-9e2c-b4afecfade6d service nova] Releasing lock "refresh_cache-2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2190.780509] env[62510]: DEBUG nova.network.neutron [None req-a51b2ea9-4570-4300-bd61-59b47291a395 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Updating instance_info_cache with network_info: [{"id": "3ba21c65-0774-4217-9f6c-e8399b1f82db", "address": "fa:16:3e:31:77:2e", "network": {"id": "e49618de-aacc-4b42-8a2e-7e2dc945a3b1", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-883053645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b13a257970e4a9a9f9cfecaaf37d9da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "73b1ea51-8078-4169-921e-d5a224120ab4", "external-id": "nsx-vlan-transportzone-689", "segmentation_id": 689, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ba21c65-07", "ovs_interfaceid": "3ba21c65-0774-4217-9f6c-e8399b1f82db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2191.283575] env[62510]: DEBUG oslo_concurrency.lockutils [None req-a51b2ea9-4570-4300-bd61-59b47291a395 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Releasing lock "refresh_cache-70ed3d3b-d436-49f3-8d17-ced3ada4d1e3" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2191.285010] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab789e51-9f6d-44b6-9e30-342e336d9650 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.291856] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a51b2ea9-4570-4300-bd61-59b47291a395 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] 
Resuming the VM {{(pid=62510) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 2191.292099] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b04e742f-bf45-4723-89ae-476d85eb315b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.298539] env[62510]: DEBUG oslo_vmware.api [None req-a51b2ea9-4570-4300-bd61-59b47291a395 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2191.298539] env[62510]: value = "task-1769931" [ 2191.298539] env[62510]: _type = "Task" [ 2191.298539] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2191.311918] env[62510]: DEBUG oslo_vmware.api [None req-a51b2ea9-4570-4300-bd61-59b47291a395 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769931, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2191.809221] env[62510]: DEBUG oslo_vmware.api [None req-a51b2ea9-4570-4300-bd61-59b47291a395 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769931, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2192.310045] env[62510]: DEBUG oslo_vmware.api [None req-a51b2ea9-4570-4300-bd61-59b47291a395 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769931, 'name': PowerOnVM_Task, 'duration_secs': 0.52177} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2192.310383] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-a51b2ea9-4570-4300-bd61-59b47291a395 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Resumed the VM {{(pid=62510) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 2192.310430] env[62510]: DEBUG nova.compute.manager [None req-a51b2ea9-4570-4300-bd61-59b47291a395 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2192.311224] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-850da364-9737-4b6d-be7b-516698354d6a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2193.207054] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "70ed3d3b-d436-49f3-8d17-ced3ada4d1e3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2193.207054] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "70ed3d3b-d436-49f3-8d17-ced3ada4d1e3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2193.207054] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "70ed3d3b-d436-49f3-8d17-ced3ada4d1e3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2193.207054] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "70ed3d3b-d436-49f3-8d17-ced3ada4d1e3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2193.207444] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "70ed3d3b-d436-49f3-8d17-ced3ada4d1e3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2193.209182] env[62510]: INFO nova.compute.manager [None req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 
70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Terminating instance [ 2193.713152] env[62510]: DEBUG nova.compute.manager [None req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2193.713482] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2193.714776] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d2e88d1-fc58-446a-8f6e-90033ead18e4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2193.722486] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2193.722708] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ad1f20b8-1ebc-4c18-ad6d-c145d6025049 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2193.728719] env[62510]: DEBUG oslo_vmware.api [None req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2193.728719] env[62510]: value = "task-1769932" [ 2193.728719] env[62510]: _type = "Task" [ 2193.728719] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2193.736868] env[62510]: DEBUG oslo_vmware.api [None req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769932, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2194.238803] env[62510]: DEBUG oslo_vmware.api [None req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769932, 'name': PowerOffVM_Task, 'duration_secs': 0.179046} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2194.239086] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2194.239289] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2194.239538] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-074e524b-bb1c-43a6-9ac0-468251e11a30 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.325838] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2194.326072] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2194.326258] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Deleting the datastore file [datastore1] 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3 {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2194.326552] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-57b96882-31fa-4c31-9b25-64edce2de8fa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.333040] env[62510]: DEBUG oslo_vmware.api [None req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for the task: (returnval){ [ 2194.333040] env[62510]: value = "task-1769934" [ 2194.333040] env[62510]: _type = "Task" [ 2194.333040] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2194.340439] env[62510]: DEBUG oslo_vmware.api [None req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769934, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2194.843807] env[62510]: DEBUG oslo_vmware.api [None req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Task: {'id': task-1769934, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145883} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2194.844816] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2194.844816] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2194.844925] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2194.845095] env[62510]: INFO nova.compute.manager [None req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Took 1.13 seconds to destroy the instance on the hypervisor. [ 2194.845349] env[62510]: DEBUG oslo.service.loopingcall [None req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2194.845547] env[62510]: DEBUG nova.compute.manager [-] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2194.845643] env[62510]: DEBUG nova.network.neutron [-] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2195.289872] env[62510]: DEBUG nova.compute.manager [req-1112c5ea-2ef8-4c93-923b-af74623ee257 req-5338361d-ad5e-4774-aca5-120153c325e4 service nova] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Received event network-vif-deleted-3ba21c65-0774-4217-9f6c-e8399b1f82db {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}} [ 2195.290076] env[62510]: INFO nova.compute.manager [req-1112c5ea-2ef8-4c93-923b-af74623ee257 req-5338361d-ad5e-4774-aca5-120153c325e4 service nova] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Neutron deleted interface 3ba21c65-0774-4217-9f6c-e8399b1f82db; detaching it from the instance and deleting it from the info cache [ 2195.290307] env[62510]: DEBUG nova.network.neutron [req-1112c5ea-2ef8-4c93-923b-af74623ee257 req-5338361d-ad5e-4774-aca5-120153c325e4 service nova] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2195.766050] env[62510]: DEBUG nova.network.neutron [-] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2195.793373] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-59d862f0-32b5-4be6-a987-1706543b0472 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.802933] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eed1eba-27f5-4f6d-87e8-bb000bbc054d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.828700] env[62510]: DEBUG nova.compute.manager [req-1112c5ea-2ef8-4c93-923b-af74623ee257 req-5338361d-ad5e-4774-aca5-120153c325e4 service nova] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Detach interface failed, port_id=3ba21c65-0774-4217-9f6c-e8399b1f82db, reason: Instance 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3 could not be found. {{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}} [ 2196.268452] env[62510]: INFO nova.compute.manager [-] [instance: 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3] Took 1.42 seconds to deallocate network for instance. 
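The records that follow show nova-compute's resource tracker writing its inventory for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 to Placement, being rejected with HTTP 409 ("placement.concurrent_update", a resource provider generation conflict), refreshing its cached inventories, aggregates and traits, and then retrying successfully as the provider generation moves from 188 to 189. Below is a minimal sketch of that refresh-and-retry pattern only; FakePlacement, GenerationConflict and set_inventory_with_retry are hypothetical stand-ins for illustration, not Nova's scheduler report client or the real Placement API.

import time

class GenerationConflict(Exception):
    """Stand-in for Placement's 409 placement.concurrent_update response."""

class FakePlacement:
    """Toy in-memory stand-in for Placement, just enough to show the retry."""
    def __init__(self, generation):
        self.generation = generation
        self.inventory = {}

    def put_inventory(self, provider, inventory, generation):
        # Placement rejects the write when the caller's generation is stale.
        if generation != self.generation:
            raise GenerationConflict(provider)
        self.inventory[provider] = inventory
        self.generation += 1  # every successful write bumps the generation
        return self.generation

    def refresh_provider(self, provider):
        # Re-read the provider's current generation (Nova also refreshes its
        # cached inventories, aggregates and traits here, as the log shows).
        return self.generation

def set_inventory_with_retry(client, provider, inventory, generation, attempts=3):
    """Write inventory, refreshing the cached generation after each conflict."""
    for _ in range(attempts):
        try:
            return client.put_inventory(provider, inventory, generation)
        except GenerationConflict:
            generation = client.refresh_provider(provider)
            time.sleep(0.1)
    raise GenerationConflict(provider)

if __name__ == "__main__":
    # The server-side generation (188) is ahead of the stale cached value (187),
    # so the first write conflicts, the caller refreshes, and the retry bumps
    # the provider generation from 188 to 189, matching the records below.
    placement = FakePlacement(generation=188)
    provider = "c3653102-341b-4ed1-8b1f-1abaf8aa3e56"
    new_gen = set_inventory_with_retry(
        placement, provider, {"VCPU": {"total": 48}}, generation=187)
    print("inventory written; provider generation is now", new_gen)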
[ 2196.775916] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2196.776226] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2196.776454] env[62510]: DEBUG nova.objects.instance [None req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lazy-loading 'resources' on Instance uuid 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3 {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2197.321962] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef22fc17-8f85-4ccc-ba6d-d98c7014fa30 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.329096] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2a7cc9a-074a-4b50-bd89-9f7b1470c47c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.358853] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62a16141-f4d0-4e2f-8daa-84414f838c09 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.365631] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4e53a67-e2b3-4650-bb41-f3515802aca5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.378270] env[62510]: DEBUG nova.compute.provider_tree [None req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 167, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2197.897412] env[62510]: ERROR nova.scheduler.client.report [None req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] [req-ec287f16-4461-4970-8c5f-20fa77e5a83c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 167, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c3653102-341b-4ed1-8b1f-1abaf8aa3e56. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ec287f16-4461-4970-8c5f-20fa77e5a83c"}]} [ 2197.913480] env[62510]: DEBUG nova.scheduler.client.report [None req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Refreshing inventories for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 2197.925225] env[62510]: DEBUG nova.scheduler.client.report [None req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Updating ProviderTree inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 2197.925427] env[62510]: DEBUG nova.compute.provider_tree [None req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2197.935221] env[62510]: DEBUG nova.scheduler.client.report [None req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Refreshing aggregate associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, aggregates: None {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 2197.952072] env[62510]: DEBUG nova.scheduler.client.report [None req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Refreshing trait associations for resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64 {{(pid=62510) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 2197.985076] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4280fd2-04ba-43be-9e28-4fc368218709 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.992547] 
env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-511d8b48-9857-4500-8e4b-a6e3be54ecb1 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2198.023030] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ff1d41d-ab9e-4f6d-95fa-5efe35a6c1a9 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2198.030012] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3fdc672-c080-48fc-8340-325195411db4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2198.042801] env[62510]: DEBUG nova.compute.provider_tree [None req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 167, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2198.572297] env[62510]: DEBUG nova.scheduler.client.report [None req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Updated inventory for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with generation 188 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 167, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 2198.572656] env[62510]: DEBUG nova.compute.provider_tree [None req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Updating resource provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 generation from 188 to 189 during operation: update_inventory {{(pid=62510) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2198.572919] env[62510]: DEBUG nova.compute.provider_tree [None req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Updating inventory in ProviderTree for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 167, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2199.079189] env[62510]: DEBUG oslo_concurrency.lockutils [None 
req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.303s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2199.098915] env[62510]: INFO nova.scheduler.client.report [None req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Deleted allocations for instance 70ed3d3b-d436-49f3-8d17-ced3ada4d1e3 [ 2199.606462] env[62510]: DEBUG oslo_concurrency.lockutils [None req-bd45cb35-cc0e-4988-8be5-c71f1070c3f8 tempest-ServerActionsTestJSON-742903676 tempest-ServerActionsTestJSON-742903676-project-member] Lock "70ed3d3b-d436-49f3-8d17-ced3ada4d1e3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.400s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2201.893939] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2201.894230] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2202.399923] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2202.399923] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Starting heal instance info cache {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10309}} [ 2202.399923] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Rebuilding the list of instances to heal {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10313}} [ 2202.928978] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "refresh_cache-2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2202.929270] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquired lock "refresh_cache-2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2202.929308] env[62510]: DEBUG nova.network.neutron [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Forcefully refreshing network info cache for instance {{(pid=62510) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2202.929468] env[62510]: DEBUG nova.objects.instance [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lazy-loading 'info_cache' on Instance uuid 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c 
{{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2204.647843] env[62510]: DEBUG nova.network.neutron [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Updating instance_info_cache with network_info: [{"id": "b4dee9ea-d202-428e-990c-597329ee2a4c", "address": "fa:16:3e:76:5f:b3", "network": {"id": "fe0b5b62-8984-4257-bf11-3f67dbad223f", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-352819536-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.251", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a586986a0bdb4a788194717def216857", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "03ac2c9c-6ad2-4a85-bfab-c7e336df859a", "external-id": "nsx-vlan-transportzone-379", "segmentation_id": 379, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4dee9ea-d2", "ovs_interfaceid": "b4dee9ea-d202-428e-990c-597329ee2a4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2205.150437] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Releasing lock "refresh_cache-2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c" {{(pid=62510) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2205.150714] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Updated the network info_cache for instance {{(pid=62510) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10380}} [ 2205.150941] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2205.151131] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2205.151328] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2205.151503] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2205.151652] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic 
task ComputeManager._poll_volume_usage {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2205.151797] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2205.151926] env[62510]: DEBUG nova.compute.manager [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62510) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10928}} [ 2205.152083] env[62510]: DEBUG oslo_service.periodic_task [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Running periodic task ComputeManager.update_available_resource {{(pid=62510) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2205.655736] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2205.656155] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2205.656155] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2205.656298] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62510) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2205.657204] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-097e551a-40a6-42c6-ad06-901def6aa087 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.665709] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9690047f-d03d-4042-a229-abe2aa6b72e2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.679391] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc770a38-77d8-453b-862a-a9fec9d079f2 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.685296] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0ddeee0-4348-46b1-91de-db24d507f47f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.714553] env[62510]: DEBUG nova.compute.resource_tracker [None 
req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181152MB free_disk=167GB free_vcpus=48 pci_devices=None {{(pid=62510) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2205.714688] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2205.714880] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2206.739833] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Instance 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c actively managed on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 192}}. {{(pid=62510) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2206.740152] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2206.740207] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=62510) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2206.765210] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c50bf1d3-ff2f-4d4f-ba83-5d7b77da921a {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.772573] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ae9ee2d-4543-4164-ab0d-7a43c0609eb6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.801906] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9085e45b-d47f-4cfe-b8f7-cc18862eb38b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.808802] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95bcda1e-9ee1-4d45-901e-5e3574e07e14 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.821709] env[62510]: DEBUG nova.compute.provider_tree [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2207.324583] env[62510]: DEBUG nova.scheduler.client.report [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None 
None] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 167, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2207.829623] env[62510]: DEBUG nova.compute.resource_tracker [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62510) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2207.830041] env[62510]: DEBUG oslo_concurrency.lockutils [None req-03d9ecb2-490c-45b1-b893-a0f90059076e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.115s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2225.843287] env[62510]: INFO nova.compute.manager [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Rebuilding instance [ 2225.887623] env[62510]: DEBUG nova.compute.manager [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Checking state {{(pid=62510) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2225.888498] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4c1fb24-d37f-44a0-b043-0cfffc1bb278 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.902316] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2226.902673] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-15285797-5a54-4c1f-867b-19849753eaa3 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.911079] env[62510]: DEBUG oslo_vmware.api [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Waiting for the task: (returnval){ [ 2226.911079] env[62510]: value = "task-1769935" [ 2226.911079] env[62510]: _type = "Task" [ 2226.911079] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2226.919088] env[62510]: DEBUG oslo_vmware.api [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Task: {'id': task-1769935, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2227.420886] env[62510]: DEBUG oslo_vmware.api [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Task: {'id': task-1769935, 'name': PowerOffVM_Task, 'duration_secs': 0.174563} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2227.421159] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Powered off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2227.421900] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Powering off the VM {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2227.422158] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7ad40a64-2ab4-4d97-8764-ba4a3bda1cfa {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.428202] env[62510]: DEBUG oslo_vmware.api [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Waiting for the task: (returnval){ [ 2227.428202] env[62510]: value = "task-1769936" [ 2227.428202] env[62510]: _type = "Task" [ 2227.428202] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2227.439401] env[62510]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] VM already powered off {{(pid=62510) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2227.439613] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Volume detach. 
Driver type: vmdk {{(pid=62510) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2227.439820] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367528', 'volume_id': 'cf2413e0-72a8-444b-ab18-1314e253a7ff', 'name': 'volume-cf2413e0-72a8-444b-ab18-1314e253a7ff', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c', 'attached_at': '', 'detached_at': '', 'volume_id': 'cf2413e0-72a8-444b-ab18-1314e253a7ff', 'serial': 'cf2413e0-72a8-444b-ab18-1314e253a7ff'} {{(pid=62510) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2227.440525] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84d66bac-3992-4a8d-b986-b90b383a699c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.457537] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fdf1a4b-138b-49c5-8293-a51564ecdc9f {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.463320] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9ae814c-c159-42db-844c-dce37c4138ca {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.479766] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88cac37f-e61f-47ca-bb18-6b7005830ce6 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.495381] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] The volume has not been displaced from its original location: [datastore1] volume-cf2413e0-72a8-444b-ab18-1314e253a7ff/volume-cf2413e0-72a8-444b-ab18-1314e253a7ff.vmdk. No consolidation needed. 
{{(pid=62510) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2227.500479] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Reconfiguring VM instance instance-0000007f to detach disk 2000 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2227.500716] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-99203517-bafc-46c7-b9f4-2ae1eded9709 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.518088] env[62510]: DEBUG oslo_vmware.api [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Waiting for the task: (returnval){ [ 2227.518088] env[62510]: value = "task-1769937" [ 2227.518088] env[62510]: _type = "Task" [ 2227.518088] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2227.525226] env[62510]: DEBUG oslo_vmware.api [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Task: {'id': task-1769937, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2228.027931] env[62510]: DEBUG oslo_vmware.api [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Task: {'id': task-1769937, 'name': ReconfigVM_Task, 'duration_secs': 0.158592} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2228.028288] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Reconfigured VM instance instance-0000007f to detach disk 2000 {{(pid=62510) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2228.032804] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f14fa183-f4d7-4569-85a7-294cd66abe02 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.047753] env[62510]: DEBUG oslo_vmware.api [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Waiting for the task: (returnval){ [ 2228.047753] env[62510]: value = "task-1769938" [ 2228.047753] env[62510]: _type = "Task" [ 2228.047753] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2228.055245] env[62510]: DEBUG oslo_vmware.api [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Task: {'id': task-1769938, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2228.558178] env[62510]: DEBUG oslo_vmware.api [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Task: {'id': task-1769938, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2229.059496] env[62510]: DEBUG oslo_vmware.api [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Task: {'id': task-1769938, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2229.559679] env[62510]: DEBUG oslo_vmware.api [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Task: {'id': task-1769938, 'name': ReconfigVM_Task, 'duration_secs': 1.131694} completed successfully. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2229.559972] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-367528', 'volume_id': 'cf2413e0-72a8-444b-ab18-1314e253a7ff', 'name': 'volume-cf2413e0-72a8-444b-ab18-1314e253a7ff', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c', 'attached_at': '', 'detached_at': '', 'volume_id': 'cf2413e0-72a8-444b-ab18-1314e253a7ff', 'serial': 'cf2413e0-72a8-444b-ab18-1314e253a7ff'} {{(pid=62510) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2229.560258] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2229.560978] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42ec4d78-cda7-47c4-8be0-d0eeea34e112 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.566984] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Unregistering the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2229.567208] env[62510]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f9d24ea4-e213-42ee-8046-0fdc53e28bd4 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.635014] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 
tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Unregistered the VM {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2229.635239] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Deleting contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2229.635427] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Deleting the datastore file [datastore1] 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2229.635681] env[62510]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-572c56a7-7ece-4d6b-a520-bbf3a3905558 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.642051] env[62510]: DEBUG oslo_vmware.api [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Waiting for the task: (returnval){ [ 2229.642051] env[62510]: value = "task-1769940" [ 2229.642051] env[62510]: _type = "Task" [ 2229.642051] env[62510]: } to complete. {{(pid=62510) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2229.649405] env[62510]: DEBUG oslo_vmware.api [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Task: {'id': task-1769940, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2230.151736] env[62510]: DEBUG oslo_vmware.api [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Task: {'id': task-1769940, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07362} completed successfully. 
{{(pid=62510) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2230.152114] env[62510]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Deleted the datastore file {{(pid=62510) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2230.152219] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Deleted contents of the VM from datastore datastore1 {{(pid=62510) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2230.152398] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2230.208200] env[62510]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Volume detach. Driver type: vmdk {{(pid=62510) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2230.208513] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-411d66ed-6c9a-4ff4-8ef0-b8e8d2880c22 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.217638] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06c6cade-ebe3-49a8-8558-598e3b2d83d5 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.242092] env[62510]: ERROR nova.compute.manager [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Failed to detach volume cf2413e0-72a8-444b-ab18-1314e253a7ff from /dev/sda: nova.exception.InstanceNotFound: Instance 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c could not be found. 
[ 2230.242092] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Traceback (most recent call last): [ 2230.242092] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 2230.242092] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] self.driver.rebuild(**kwargs) [ 2230.242092] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 2230.242092] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] raise NotImplementedError() [ 2230.242092] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] NotImplementedError [ 2230.242092] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] [ 2230.242092] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] During handling of the above exception, another exception occurred: [ 2230.242092] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] [ 2230.242092] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Traceback (most recent call last): [ 2230.242092] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 2230.242092] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] self.driver.detach_volume(context, old_connection_info, [ 2230.242092] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 2230.242092] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] return self._volumeops.detach_volume(connection_info, instance) [ 2230.242092] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 2230.242092] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] self._detach_volume_vmdk(connection_info, instance) [ 2230.242092] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 2230.242092] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 2230.242092] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 2230.242092] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] stable_ref.fetch_moref(session) [ 2230.242092] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 2230.242092] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] raise exception.InstanceNotFound(instance_id=self._uuid) [ 2230.242092] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] 
nova.exception.InstanceNotFound: Instance 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c could not be found. [ 2230.242092] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] [ 2230.357639] env[62510]: DEBUG nova.compute.utils [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Build of instance 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c aborted: Failed to rebuild volume backed instance. {{(pid=62510) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2230.361234] env[62510]: ERROR nova.compute.manager [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c aborted: Failed to rebuild volume backed instance. [ 2230.361234] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Traceback (most recent call last): [ 2230.361234] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 2230.361234] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] self.driver.rebuild(**kwargs) [ 2230.361234] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 2230.361234] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] raise NotImplementedError() [ 2230.361234] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] NotImplementedError [ 2230.361234] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] [ 2230.361234] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] During handling of the above exception, another exception occurred: [ 2230.361234] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] [ 2230.361234] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Traceback (most recent call last): [ 2230.361234] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] File "/opt/stack/nova/nova/compute/manager.py", line 3643, in _rebuild_volume_backed_instance [ 2230.361234] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] self._detach_root_volume(context, instance, root_bdm) [ 2230.361234] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] File "/opt/stack/nova/nova/compute/manager.py", line 3622, in _detach_root_volume [ 2230.361234] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] with excutils.save_and_reraise_exception(): [ 2230.361234] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2230.361234] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] self.force_reraise() [ 2230.361234] env[62510]: ERROR nova.compute.manager [instance: 
2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2230.361234] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] raise self.value [ 2230.361234] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 2230.361234] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] self.driver.detach_volume(context, old_connection_info, [ 2230.361234] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 2230.361234] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] return self._volumeops.detach_volume(connection_info, instance) [ 2230.361234] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 2230.361234] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] self._detach_volume_vmdk(connection_info, instance) [ 2230.361234] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 2230.361234] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 2230.361234] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 2230.361234] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] stable_ref.fetch_moref(session) [ 2230.361234] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 2230.361234] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] raise exception.InstanceNotFound(instance_id=self._uuid) [ 2230.361234] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] nova.exception.InstanceNotFound: Instance 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c could not be found. 
[ 2230.361234] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] [ 2230.361234] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] During handling of the above exception, another exception occurred: [ 2230.361234] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] [ 2230.361234] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Traceback (most recent call last): [ 2230.361234] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] File "/opt/stack/nova/nova/compute/manager.py", line 11246, in _error_out_instance_on_exception [ 2230.361234] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] yield [ 2230.361234] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] File "/opt/stack/nova/nova/compute/manager.py", line 3911, in rebuild_instance [ 2230.361234] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] self._do_rebuild_instance_with_claim( [ 2230.362401] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] File "/opt/stack/nova/nova/compute/manager.py", line 3997, in _do_rebuild_instance_with_claim [ 2230.362401] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] self._do_rebuild_instance( [ 2230.362401] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] File "/opt/stack/nova/nova/compute/manager.py", line 4189, in _do_rebuild_instance [ 2230.362401] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] self._rebuild_default_impl(**kwargs) [ 2230.362401] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] File "/opt/stack/nova/nova/compute/manager.py", line 3766, in _rebuild_default_impl [ 2230.362401] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] self._rebuild_volume_backed_instance( [ 2230.362401] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] File "/opt/stack/nova/nova/compute/manager.py", line 3658, in _rebuild_volume_backed_instance [ 2230.362401] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] raise exception.BuildAbortException( [ 2230.362401] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] nova.exception.BuildAbortException: Build of instance 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c aborted: Failed to rebuild volume backed instance. 
[ 2230.362401] env[62510]: ERROR nova.compute.manager [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] [ 2232.376842] env[62510]: DEBUG oslo_concurrency.lockutils [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2232.377207] env[62510]: DEBUG oslo_concurrency.lockutils [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2232.391850] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd02e1fa-9808-4076-995b-5589db33ad35 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2232.399785] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6537b924-15cb-4090-93f6-c5bb35dfdc95 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2232.430323] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5b48ba5-bb51-49fc-a485-09e41ebb2e9b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2232.437741] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c7923a7-7e63-46e1-9ec4-c36ab291354d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2232.451236] env[62510]: DEBUG nova.compute.provider_tree [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2232.812451] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e1a32b4a-7a5e-41a2-aa3d-a71d9db0e9a5 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Acquiring lock "2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2232.812718] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e1a32b4a-7a5e-41a2-aa3d-a71d9db0e9a5 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Lock "2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2232.813041] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e1a32b4a-7a5e-41a2-aa3d-a71d9db0e9a5 tempest-ServerActionsV293TestJSON-685885653 
[ 2232.813245] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e1a32b4a-7a5e-41a2-aa3d-a71d9db0e9a5 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Lock "2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2232.813428] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e1a32b4a-7a5e-41a2-aa3d-a71d9db0e9a5 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Lock "2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2232.816979] env[62510]: INFO nova.compute.manager [None req-e1a32b4a-7a5e-41a2-aa3d-a71d9db0e9a5 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Terminating instance
[ 2232.954495] env[62510]: DEBUG nova.scheduler.client.report [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 167, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}}
[ 2233.321259] env[62510]: DEBUG nova.compute.manager [None req-e1a32b4a-7a5e-41a2-aa3d-a71d9db0e9a5 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Start destroying the instance on the hypervisor. {{(pid=62510) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}}
[ 2233.321620] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-389c9875-a1a6-455d-9156-c144c9313176 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2233.333886] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c86d4f1c-a514-4409-8bc2-7eb80b6d0143 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2233.356873] env[62510]: WARNING nova.virt.vmwareapi.driver [None req-e1a32b4a-7a5e-41a2-aa3d-a71d9db0e9a5 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c could not be found.
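The "Inventory has not changed" line reports the full inventory payload the report client compares before deciding whether to update placement. A small sketch of that comparison, using the exact values from the log (the helper function is illustrative):

# Inventory payload as reported in the log for provider
# c3653102-341b-4ed1-8b1f-1abaf8aa3e56.
reported = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 167,
                'step_size': 1, 'allocation_ratio': 1.0},
}

def inventory_changed(cached, new):
    # If nothing differs, the update to placement can be skipped, which is
    # what the "has not changed" DEBUG lines indicate.
    return cached != new

assert not inventory_changed(reported, dict(reported))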
[ 2233.357098] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e1a32b4a-7a5e-41a2-aa3d-a71d9db0e9a5 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Destroying instance {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}}
[ 2233.357367] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b10a9db1-2c3f-44e7-b1de-1e84c2429bbf {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2233.364833] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18010486-551d-4d68-a8dc-0a5cd9e51110 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2233.386928] env[62510]: WARNING nova.virt.vmwareapi.vmops [None req-e1a32b4a-7a5e-41a2-aa3d-a71d9db0e9a5 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c could not be found.
[ 2233.387211] env[62510]: DEBUG nova.virt.vmwareapi.vmops [None req-e1a32b4a-7a5e-41a2-aa3d-a71d9db0e9a5 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Instance destroyed {{(pid=62510) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 2233.387310] env[62510]: INFO nova.compute.manager [None req-e1a32b4a-7a5e-41a2-aa3d-a71d9db0e9a5 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Took 0.07 seconds to destroy the instance on the hypervisor.
[ 2233.387571] env[62510]: DEBUG oslo.service.loopingcall [None req-e1a32b4a-7a5e-41a2-aa3d-a71d9db0e9a5 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62510) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
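The two WARNINGs show the driver treating a missing backend VM as already destroyed so the rest of the teardown (network, volumes) can continue. A hedged sketch of that idempotent-destroy pattern (the exception class and helper callables are stand-ins, not the vmwareapi driver's real API):

class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""

def destroy(instance_uuid, lookup_vm, delete_vm, log):
    # If the VM is already gone on the vCenter side, log a warning and move
    # on, so the network and volume cleanup that follows in the log still
    # happens even though there is nothing to tear down here.
    try:
        vm_ref = lookup_vm(instance_uuid)
    except InstanceNotFound:
        log("Instance does not exist on backend; nothing to destroy")
        return
    delete_vm(vm_ref)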
[ 2233.387774] env[62510]: DEBUG nova.compute.manager [-] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Deallocating network for instance {{(pid=62510) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}}
[ 2233.387874] env[62510]: DEBUG nova.network.neutron [-] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] deallocate_for_instance() {{(pid=62510) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 2233.460286] env[62510]: DEBUG oslo_concurrency.lockutils [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.083s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2233.460499] env[62510]: INFO nova.compute.manager [None req-b0b3f66f-7933-4a52-9a69-cf2250ce28a0 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Successfully reverted task state from rebuilding on failure for instance.
[ 2233.833428] env[62510]: DEBUG nova.compute.manager [req-14b2bc99-ddb6-4503-a964-64f12d335d1c req-c29cb14c-4b67-4f11-9827-85fc2a32b84a service nova] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Received event network-vif-deleted-b4dee9ea-d202-428e-990c-597329ee2a4c {{(pid=62510) external_instance_event /opt/stack/nova/nova/compute/manager.py:11512}}
[ 2233.833644] env[62510]: INFO nova.compute.manager [req-14b2bc99-ddb6-4503-a964-64f12d335d1c req-c29cb14c-4b67-4f11-9827-85fc2a32b84a service nova] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Neutron deleted interface b4dee9ea-d202-428e-990c-597329ee2a4c; detaching it from the instance and deleting it from the info cache
[ 2233.833816] env[62510]: DEBUG nova.network.neutron [req-14b2bc99-ddb6-4503-a964-64f12d335d1c req-c29cb14c-4b67-4f11-9827-85fc2a32b84a service nova] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2234.310270] env[62510]: DEBUG nova.network.neutron [-] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Updating instance_info_cache with network_info: [] {{(pid=62510) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2234.337649] env[62510]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-19e5ef87-7431-4e54-a946-d7fbf0244b0d {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2234.347645] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53757064-a7aa-4cdc-8e3e-4101b9502e3c {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2234.371519] env[62510]: DEBUG nova.compute.manager [req-14b2bc99-ddb6-4503-a964-64f12d335d1c req-c29cb14c-4b67-4f11-9827-85fc2a32b84a service nova] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Detach interface failed, port_id=b4dee9ea-d202-428e-990c-597329ee2a4c, reason: Instance 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c could not be found. {{(pid=62510) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11346}}
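The network-vif-deleted handling above prunes the deleted port from the instance's cached network info, which ends up as an empty list here; the follow-up detach failure is logged and tolerated because the instance is already gone. A rough sketch of the pruning step (the data shape and values are illustrative):

def prune_deleted_vif(network_info, deleted_port_id):
    # Drop the VIF whose Neutron port was deleted; the remaining list is
    # what gets written back to the instance_info_cache (here it ends up
    # empty: []).
    return [vif for vif in network_info if vif.get('id') != deleted_port_id]

cached = [{'id': 'b4dee9ea-d202-428e-990c-597329ee2a4c',
           'address': 'fa:16:3e:00:00:00'}]  # placeholder MAC
print(prune_deleted_vif(cached, 'b4dee9ea-d202-428e-990c-597329ee2a4c'))  # -> []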
[ 2234.812742] env[62510]: INFO nova.compute.manager [-] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Took 1.42 seconds to deallocate network for instance.
[ 2235.355858] env[62510]: INFO nova.compute.manager [None req-e1a32b4a-7a5e-41a2-aa3d-a71d9db0e9a5 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Took 0.54 seconds to detach 1 volumes for instance.
[ 2235.358279] env[62510]: DEBUG nova.compute.manager [None req-e1a32b4a-7a5e-41a2-aa3d-a71d9db0e9a5 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] [instance: 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c] Deleting volume: cf2413e0-72a8-444b-ab18-1314e253a7ff {{(pid=62510) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}}
[ 2235.898902] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e1a32b4a-7a5e-41a2-aa3d-a71d9db0e9a5 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2235.899204] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e1a32b4a-7a5e-41a2-aa3d-a71d9db0e9a5 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2235.899426] env[62510]: DEBUG nova.objects.instance [None req-e1a32b4a-7a5e-41a2-aa3d-a71d9db0e9a5 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Lazy-loading 'resources' on Instance uuid 2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c {{(pid=62510) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 2236.418512] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d83baac4-a785-43c3-9be8-02f8c6c2cbfb {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2236.428763] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7b26901-46f3-43e3-93b8-b69e508f9515 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2236.463191] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-535ab8a5-c36c-445c-9094-868b252bc34b {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2236.470932] env[62510]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06e48d37-3f94-44e7-8c73-39d56b154d67 {{(pid=62510) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2236.484077] env[62510]: DEBUG nova.compute.provider_tree [None req-e1a32b4a-7a5e-41a2-aa3d-a71d9db0e9a5 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Inventory has not changed in ProviderTree for provider: c3653102-341b-4ed1-8b1f-1abaf8aa3e56 {{(pid=62510) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
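The "Lazy-loading 'resources'" line reflects the versioned-object pattern of fetching a field only on first access. A minimal illustration of that idea (this is not the real Instance object; the loader callable is a placeholder):

class LazyInstance:
    def __init__(self, uuid, loader):
        self.uuid = uuid
        self._loader = loader      # callable that fetches one field, e.g. from the DB
        self._loaded = {}

    def __getattr__(self, name):
        # Only called when normal attribute lookup fails: fetch the field
        # once, cache it, and log the lazy load, as obj_load_attr does above.
        if name.startswith('_'):
            raise AttributeError(name)
        if name not in self._loaded:
            print(f"Lazy-loading '{name}' on Instance uuid {self.uuid}")
            self._loaded[name] = self._loader(self.uuid, name)
        return self._loaded[name]

inst = LazyInstance('2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c', lambda u, f: None)
_ = inst.resources  # first access triggers the load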
[ 2236.987935] env[62510]: DEBUG nova.scheduler.client.report [None req-e1a32b4a-7a5e-41a2-aa3d-a71d9db0e9a5 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Inventory has not changed for provider c3653102-341b-4ed1-8b1f-1abaf8aa3e56 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 167, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62510) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}}
[ 2237.492929] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e1a32b4a-7a5e-41a2-aa3d-a71d9db0e9a5 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.594s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2238.012975] env[62510]: DEBUG oslo_concurrency.lockutils [None req-e1a32b4a-7a5e-41a2-aa3d-a71d9db0e9a5 tempest-ServerActionsV293TestJSON-685885653 tempest-ServerActionsV293TestJSON-685885653-project-member] Lock "2072a4ed-0367-41b3-b1a1-f4e97b3eaf4c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.200s {{(pid=62510) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
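For reference, the usable capacity placement derives from an inventory record like the one above is roughly (total - reserved) * allocation_ratio, consumed in chunks no larger than max_unit. A quick check against the VCPU entry from the log (the helper function is illustrative, not the placement code):

def effective_capacity(inv):
    # Placement-style usable capacity: (total - reserved) * allocation_ratio.
    return int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])

vcpu = {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0, 'max_unit': 16}
print(effective_capacity(vcpu))  # 192 schedulable VCPUs, at most max_unit=16 per allocation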